1 /*
2  * Copyright (C) 2010 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "NuPlayerRenderer"
19 #include <utils/Log.h>
20 
21 #include "AWakeLock.h"
22 #include "NuPlayerRenderer.h"
23 #include <algorithm>
24 #include <cutils/properties.h>
25 #include <media/stagefright/foundation/ADebug.h>
26 #include <media/stagefright/foundation/AMessage.h>
27 #include <media/stagefright/foundation/AUtils.h>
28 #include <media/stagefright/MediaClock.h>
29 #include <media/stagefright/MediaCodecConstants.h>
30 #include <media/stagefright/MediaDefs.h>
31 #include <media/stagefright/MediaErrors.h>
32 #include <media/stagefright/MetaData.h>
33 #include <media/stagefright/Utils.h>
34 #include <media/stagefright/VideoFrameScheduler.h>
35 #include <media/MediaCodecBuffer.h>
36 #include <utils/SystemClock.h>
37 
38 #include <inttypes.h>
39 
40 namespace android {
41 
42 /*
43  * Example of common configuration settings in shell script form
44 
45    #Turn offload audio off (use PCM for Play Music) -- AudioPolicyManager
46    adb shell setprop audio.offload.disable 1
47 
48    #Allow offload audio with video (requires offloading to be enabled) -- AudioPolicyManager
49    adb shell setprop audio.offload.video 1
50 
51    #Use audio callbacks for PCM data
52    adb shell setprop media.stagefright.audio.cbk 1
53 
54    #Use deep buffer for PCM data with video (it is generally enabled for audio-only)
55    adb shell setprop media.stagefright.audio.deep 1
56 
57    #Set size of buffers for pcm audio sink in msec (example: 1000 msec)
58    adb shell setprop media.stagefright.audio.sink 1000
59 
60  * These configurations take effect for the next track played (not the current track).
61  */
62 
63 static inline bool getUseAudioCallbackSetting() {
64     return property_get_bool("media.stagefright.audio.cbk", false /* default_value */);
65 }
66 
67 static inline int32_t getAudioSinkPcmMsSetting() {
68     return property_get_int32(
69             "media.stagefright.audio.sink", 500 /* default_value */);
70 }
71 
72 // Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
73 // is closed to allow the audio DSP to power down.
74 static const int64_t kOffloadPauseMaxUs = 10000000LL;
75 
76 // Additional delay after teardown before releasing the wake lock to allow time for the audio path
77 // to be completely released
78 static const int64_t kWakelockReleaseDelayUs = 2000000LL;
79 
80 // Maximum allowed delay from AudioSink, 1.5 seconds.
81 static const int64_t kMaxAllowedAudioSinkDelayUs = 1500000LL;
82 
83 static const int64_t kMinimumAudioClockUpdatePeriodUs = 20 /* msec */ * 1000;
84 
85 // Default video frame display duration when only video exists.
86 // Used to set max media time in MediaClock.
87 static const int64_t kDefaultVideoFrameIntervalUs = 100000LL;
88 
89 // static
90 const NuPlayer::Renderer::PcmInfo NuPlayer::Renderer::AUDIO_PCMINFO_INITIALIZER = {
91         AUDIO_CHANNEL_NONE,
92         AUDIO_OUTPUT_FLAG_NONE,
93         AUDIO_FORMAT_INVALID,
94         0, // mNumChannels
95         0 // mSampleRate
96 };
97 
98 // static
99 const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
100 
101 static audio_format_t constexpr audioFormatFromEncoding(int32_t pcmEncoding) {
102     switch (pcmEncoding) {
103     case kAudioEncodingPcmFloat:
104         return AUDIO_FORMAT_PCM_FLOAT;
105     case kAudioEncodingPcm16bit:
106         return AUDIO_FORMAT_PCM_16_BIT;
107     case kAudioEncodingPcm8bit:
108         return AUDIO_FORMAT_PCM_8_BIT; // TODO: do we want to support this?
109     default:
110         ALOGE("%s: Invalid encoding: %d", __func__, pcmEncoding);
111         return AUDIO_FORMAT_INVALID;
112     }
113 }
114 
115 NuPlayer::Renderer::Renderer(
116         const sp<MediaPlayerBase::AudioSink> &sink,
117         const sp<MediaClock> &mediaClock,
118         const sp<AMessage> &notify,
119         uint32_t flags)
120     : mAudioSink(sink),
121       mUseVirtualAudioSink(false),
122       mNotify(notify),
123       mFlags(flags),
124       mNumFramesWritten(0),
125       mDrainAudioQueuePending(false),
126       mDrainVideoQueuePending(false),
127       mAudioQueueGeneration(0),
128       mVideoQueueGeneration(0),
129       mAudioDrainGeneration(0),
130       mVideoDrainGeneration(0),
131       mAudioEOSGeneration(0),
132       mMediaClock(mediaClock),
133       mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),
134       mAudioFirstAnchorTimeMediaUs(-1),
135       mAnchorTimeMediaUs(-1),
136       mAnchorNumFramesWritten(-1),
137       mVideoLateByUs(0LL),
138       mNextVideoTimeMediaUs(-1),
139       mHasAudio(false),
140       mHasVideo(false),
141       mNotifyCompleteAudio(false),
142       mNotifyCompleteVideo(false),
143       mSyncQueues(false),
144       mPaused(false),
145       mPauseDrainAudioAllowedUs(0),
146       mVideoSampleReceived(false),
147       mVideoRenderingStarted(false),
148       mVideoRenderingStartGeneration(0),
149       mAudioRenderingStartGeneration(0),
150       mRenderingDataDelivered(false),
151       mNextAudioClockUpdateTimeUs(-1),
152       mLastAudioMediaTimeUs(-1),
153       mAudioOffloadPauseTimeoutGeneration(0),
154       mAudioTornDown(false),
155       mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
156       mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),
157       mTotalBuffersQueued(0),
158       mLastAudioBufferDrained(0),
159       mUseAudioCallback(false),
160       mWakeLock(new AWakeLock()) {
161     CHECK(mediaClock != NULL);
162     mPlaybackRate = mPlaybackSettings.mSpeed;
163     mMediaClock->setPlaybackRate(mPlaybackRate);
164     (void)mSyncFlag.test_and_set();
165 }
166 
167 NuPlayer::Renderer::~Renderer() {
168     if (offloadingAudio()) {
169         mAudioSink->stop();
170         mAudioSink->flush();
171         mAudioSink->close();
172     }
173 
174     // Try to avoid a race condition in case the callback is still running.
175     Mutex::Autolock autoLock(mLock);
176     if (mUseAudioCallback) {
177         flushQueue(&mAudioQueue);
178         flushQueue(&mVideoQueue);
179     }
180     mWakeLock.clear();
181     mVideoScheduler.clear();
182     mNotify.clear();
183     mAudioSink.clear();
184 }
185 
186 void NuPlayer::Renderer::queueBuffer(
187         bool audio,
188         const sp<MediaCodecBuffer> &buffer,
189         const sp<AMessage> &notifyConsumed) {
190     sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this);
191     msg->setInt32("queueGeneration", getQueueGeneration(audio));
192     msg->setInt32("audio", static_cast<int32_t>(audio));
193     msg->setObject("buffer", buffer);
194     msg->setMessage("notifyConsumed", notifyConsumed);
195     msg->post();
196 }
197 
198 void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
199     CHECK_NE(finalResult, (status_t)OK);
200 
201     sp<AMessage> msg = new AMessage(kWhatQueueEOS, this);
202     msg->setInt32("queueGeneration", getQueueGeneration(audio));
203     msg->setInt32("audio", static_cast<int32_t>(audio));
204     msg->setInt32("finalResult", finalResult);
205     msg->post();
206 }
207 
208 status_t NuPlayer::Renderer::setPlaybackSettings(const AudioPlaybackRate &rate) {
209     sp<AMessage> msg = new AMessage(kWhatConfigPlayback, this);
210     writeToAMessage(msg, rate);
211     sp<AMessage> response;
212     status_t err = msg->postAndAwaitResponse(&response);
213     if (err == OK && response != NULL) {
214         CHECK(response->findInt32("err", &err));
215     }
216     return err;
217 }
218 
219 status_t NuPlayer::Renderer::onConfigPlayback(const AudioPlaybackRate &rate /* sanitized */) {
220     if (rate.mSpeed == 0.f) {
221         onPause();
222         // don't call audiosink's setPlaybackRate if pausing, as the pitch does not
223         // have to correspond to any non-zero speed (e.g. the old speed). Keep the
224         // settings nonetheless, using the old speed, in case the audiosink changes.
225         AudioPlaybackRate newRate = rate;
226         newRate.mSpeed = mPlaybackSettings.mSpeed;
227         mPlaybackSettings = newRate;
228         return OK;
229     }
230 
231     if (mAudioSink != NULL && mAudioSink->ready()) {
232         status_t err = mAudioSink->setPlaybackRate(rate);
233         if (err != OK) {
234             return err;
235         }
236     }
237     mPlaybackSettings = rate;
238     mPlaybackRate = rate.mSpeed;
239     mMediaClock->setPlaybackRate(mPlaybackRate);
240     return OK;
241 }
242 
243 status_t NuPlayer::Renderer::getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
244     sp<AMessage> msg = new AMessage(kWhatGetPlaybackSettings, this);
245     sp<AMessage> response;
246     status_t err = msg->postAndAwaitResponse(&response);
247     if (err == OK && response != NULL) {
248         CHECK(response->findInt32("err", &err));
249         if (err == OK) {
250             readFromAMessage(response, rate);
251         }
252     }
253     return err;
254 }
255 
256 status_t NuPlayer::Renderer::onGetPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
257     if (mAudioSink != NULL && mAudioSink->ready()) {
258         status_t err = mAudioSink->getPlaybackRate(rate);
259         if (err == OK) {
260             if (!isAudioPlaybackRateEqual(*rate, mPlaybackSettings)) {
261                 ALOGW("correcting mismatch in internal/external playback rate");
262             }
263             // get the playback settings used by the audiosink, as they may be
264             // slightly off because the audiosink does not apply small changes.
265             mPlaybackSettings = *rate;
266             if (mPaused) {
267                 rate->mSpeed = 0.f;
268             }
269         }
270         return err;
271     }
272     *rate = mPlaybackSettings;
273     return OK;
274 }
275 
276 status_t NuPlayer::Renderer::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) {
277     sp<AMessage> msg = new AMessage(kWhatConfigSync, this);
278     writeToAMessage(msg, sync, videoFpsHint);
279     sp<AMessage> response;
280     status_t err = msg->postAndAwaitResponse(&response);
281     if (err == OK && response != NULL) {
282         CHECK(response->findInt32("err", &err));
283     }
284     return err;
285 }
286 
287 status_t NuPlayer::Renderer::onConfigSync(const AVSyncSettings &sync, float videoFpsHint __unused) {
288     if (sync.mSource != AVSYNC_SOURCE_DEFAULT) {
289         return BAD_VALUE;
290     }
291     // TODO: support sync sources
292     return INVALID_OPERATION;
293 }
294 
295 status_t NuPlayer::Renderer::getSyncSettings(AVSyncSettings *sync, float *videoFps) {
296     sp<AMessage> msg = new AMessage(kWhatGetSyncSettings, this);
297     sp<AMessage> response;
298     status_t err = msg->postAndAwaitResponse(&response);
299     if (err == OK && response != NULL) {
300         CHECK(response->findInt32("err", &err));
301         if (err == OK) {
302             readFromAMessage(response, sync, videoFps);
303         }
304     }
305     return err;
306 }
307 
308 status_t NuPlayer::Renderer::onGetSyncSettings(
309         AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */) {
310     *sync = mSyncSettings;
311     *videoFps = -1.f;
312     return OK;
313 }
314 
315 void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
316     {
317         Mutex::Autolock autoLock(mLock);
318         if (audio) {
319             mNotifyCompleteAudio |= notifyComplete;
320             clearAudioFirstAnchorTime_l();
321             ++mAudioQueueGeneration;
322             ++mAudioDrainGeneration;
323         } else {
324             mNotifyCompleteVideo |= notifyComplete;
325             ++mVideoQueueGeneration;
326             ++mVideoDrainGeneration;
327             mNextVideoTimeMediaUs = -1;
328         }
329 
330         mMediaClock->clearAnchor();
331         mVideoLateByUs = 0;
332         mSyncQueues = false;
333     }
334 
335     // Wait until the current job in the message queue is done, to make sure
336     // buffer processing from the old generation is finished. After the current
337     // job is finished, access to buffers is protected by generation.
338     Mutex::Autolock syncLock(mSyncLock);
339     int64_t syncCount = mSyncCount;
340     mSyncFlag.clear();
341 
342     // Make sure message queue is not empty after mSyncFlag is cleared.
343     sp<AMessage> msg = new AMessage(kWhatFlush, this);
344     msg->setInt32("audio", static_cast<int32_t>(audio));
345     msg->post();
346 
347     int64_t uptimeMs = uptimeMillis();
348     while (mSyncCount == syncCount) {
349         (void)mSyncCondition.waitRelative(mSyncLock, ms2ns(1000));
350         if (uptimeMillis() - uptimeMs > 1000) {
351             ALOGW("flush(): no wake-up from sync point for 1s; stop waiting to "
352                   "prevent being stuck indefinitely.");
353             break;
354         }
355     }
356 }
357 
358 void NuPlayer::Renderer::signalTimeDiscontinuity() {
359 }
360 
361 void NuPlayer::Renderer::signalDisableOffloadAudio() {
362     (new AMessage(kWhatDisableOffloadAudio, this))->post();
363 }
364 
365 void NuPlayer::Renderer::signalEnableOffloadAudio() {
366     (new AMessage(kWhatEnableOffloadAudio, this))->post();
367 }
368 
369 void NuPlayer::Renderer::pause() {
370     (new AMessage(kWhatPause, this))->post();
371 }
372 
373 void NuPlayer::Renderer::resume() {
374     (new AMessage(kWhatResume, this))->post();
375 }
376 
377 void NuPlayer::Renderer::setVideoFrameRate(float fps) {
378     sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, this);
379     msg->setFloat("frame-rate", fps);
380     msg->post();
381 }
382 
383 // Called on any threads without mLock acquired.
384 status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
385     status_t result = mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
386     if (result == OK) {
387         return result;
388     }
389 
390     // MediaClock has not started yet. Try to start it if possible.
391     {
392         Mutex::Autolock autoLock(mLock);
393         if (mAudioFirstAnchorTimeMediaUs == -1) {
394             return result;
395         }
396 
397         AudioTimestamp ts;
398         status_t res = mAudioSink->getTimestamp(ts);
399         if (res != OK) {
400             return result;
401         }
402 
403         // AudioSink has rendered some frames.
404         int64_t nowUs = ALooper::GetNowUs();
405         int64_t playedOutDurationUs = mAudioSink->getPlayedOutDurationUs(nowUs);
406         if (playedOutDurationUs == 0) {
407             *mediaUs = mAudioFirstAnchorTimeMediaUs;
408             return OK;
409         }
410         int64_t nowMediaUs = playedOutDurationUs + mAudioFirstAnchorTimeMediaUs;
411         mMediaClock->updateAnchor(nowMediaUs, nowUs, -1);
412     }
413 
414     return mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
415 }
416 
417 void NuPlayer::Renderer::clearAudioFirstAnchorTime_l() {
418     mAudioFirstAnchorTimeMediaUs = -1;
419     mMediaClock->setStartingTimeMedia(-1);
420 }
421 
422 void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs) {
423     if (mAudioFirstAnchorTimeMediaUs == -1) {
424         mAudioFirstAnchorTimeMediaUs = mediaUs;
425         mMediaClock->setStartingTimeMedia(mediaUs);
426     }
427 }
428 
429 // Called on renderer looper.
430 void NuPlayer::Renderer::clearAnchorTime() {
431     mMediaClock->clearAnchor();
432     mAnchorTimeMediaUs = -1;
433     mAnchorNumFramesWritten = -1;
434 }
435 
436 void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
437     Mutex::Autolock autoLock(mLock);
438     mVideoLateByUs = lateUs;
439 }
440 
441 int64_t NuPlayer::Renderer::getVideoLateByUs() {
442     Mutex::Autolock autoLock(mLock);
443     return mVideoLateByUs;
444 }
445 
446 status_t NuPlayer::Renderer::openAudioSink(
447         const sp<AMessage> &format,
448         bool offloadOnly,
449         bool hasVideo,
450         uint32_t flags,
451         bool *isOffloaded,
452         bool isStreaming) {
453     sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, this);
454     msg->setMessage("format", format);
455     msg->setInt32("offload-only", offloadOnly);
456     msg->setInt32("has-video", hasVideo);
457     msg->setInt32("flags", flags);
458     msg->setInt32("isStreaming", isStreaming);
459 
460     sp<AMessage> response;
461     status_t postStatus = msg->postAndAwaitResponse(&response);
462 
463     int32_t err;
464     if (postStatus != OK || response.get() == nullptr || !response->findInt32("err", &err)) {
465         err = INVALID_OPERATION;
466     } else if (err == OK && isOffloaded != NULL) {
467         int32_t offload;
468         CHECK(response->findInt32("offload", &offload));
469         *isOffloaded = (offload != 0);
470     }
471     return err;
472 }
473 
474 void NuPlayer::Renderer::closeAudioSink() {
475     sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, this);
476 
477     sp<AMessage> response;
478     msg->postAndAwaitResponse(&response);
479 }
480 
481 void NuPlayer::Renderer::dump(AString& logString) {
482     Mutex::Autolock autoLock(mLock);
483     logString.append("paused(");
484     logString.append(mPaused);
485     logString.append("), offloading(");
486     logString.append(offloadingAudio());
487     logString.append("), wakelock(acquired=");
488     mWakelockAcquireEvent.dump(logString);
489     logString.append(", timeout=");
490     mWakelockTimeoutEvent.dump(logString);
491     logString.append(", release=");
492     mWakelockReleaseEvent.dump(logString);
493     logString.append(", cancel=");
494     mWakelockCancelEvent.dump(logString);
495     logString.append(")");
496 }
497 
498 void NuPlayer::Renderer::changeAudioFormat(
499         const sp<AMessage> &format,
500         bool offloadOnly,
501         bool hasVideo,
502         uint32_t flags,
503         bool isStreaming,
504         const sp<AMessage> &notify) {
505     sp<AMessage> meta = new AMessage;
506     meta->setMessage("format", format);
507     meta->setInt32("offload-only", offloadOnly);
508     meta->setInt32("has-video", hasVideo);
509     meta->setInt32("flags", flags);
510     meta->setInt32("isStreaming", isStreaming);
511 
512     sp<AMessage> msg = new AMessage(kWhatChangeAudioFormat, this);
513     msg->setInt32("queueGeneration", getQueueGeneration(true /* audio */));
514     msg->setMessage("notify", notify);
515     msg->setMessage("meta", meta);
516     msg->post();
517 }
518 
519 void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
520     switch (msg->what()) {
521         case kWhatOpenAudioSink:
522         {
523             sp<AMessage> format;
524             CHECK(msg->findMessage("format", &format));
525 
526             int32_t offloadOnly;
527             CHECK(msg->findInt32("offload-only", &offloadOnly));
528 
529             int32_t hasVideo;
530             CHECK(msg->findInt32("has-video", &hasVideo));
531 
532             uint32_t flags;
533             CHECK(msg->findInt32("flags", (int32_t *)&flags));
534 
535             uint32_t isStreaming;
536             CHECK(msg->findInt32("isStreaming", (int32_t *)&isStreaming));
537 
538             status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags, isStreaming);
539 
540             sp<AMessage> response = new AMessage;
541             response->setInt32("err", err);
542             response->setInt32("offload", offloadingAudio());
543 
544             sp<AReplyToken> replyID;
545             CHECK(msg->senderAwaitsResponse(&replyID));
546             response->postReply(replyID);
547 
548             break;
549         }
550 
551         case kWhatCloseAudioSink:
552         {
553             sp<AReplyToken> replyID;
554             CHECK(msg->senderAwaitsResponse(&replyID));
555 
556             onCloseAudioSink();
557 
558             sp<AMessage> response = new AMessage;
559             response->postReply(replyID);
560             break;
561         }
562 
563         case kWhatStopAudioSink:
564         {
565             mAudioSink->stop();
566             break;
567         }
568 
569         case kWhatChangeAudioFormat:
570         {
571             int32_t queueGeneration;
572             CHECK(msg->findInt32("queueGeneration", &queueGeneration));
573 
574             sp<AMessage> notify;
575             CHECK(msg->findMessage("notify", &notify));
576 
577             if (offloadingAudio()) {
578                 ALOGW("changeAudioFormat should NOT be called in offload mode");
579                 notify->setInt32("err", INVALID_OPERATION);
580                 notify->post();
581                 break;
582             }
583 
584             sp<AMessage> meta;
585             CHECK(msg->findMessage("meta", &meta));
586 
587             if (queueGeneration != getQueueGeneration(true /* audio */)
588                     || mAudioQueue.empty()) {
589                 onChangeAudioFormat(meta, notify);
590                 break;
591             }
592 
593             QueueEntry entry;
594             entry.mNotifyConsumed = notify;
595             entry.mMeta = meta;
596 
597             Mutex::Autolock autoLock(mLock);
598             mAudioQueue.push_back(entry);
599             postDrainAudioQueue_l();
600 
601             break;
602         }
603 
604         case kWhatDrainAudioQueue:
605         {
606             mDrainAudioQueuePending = false;
607 
608             int32_t generation;
609             CHECK(msg->findInt32("drainGeneration", &generation));
610             if (generation != getDrainGeneration(true /* audio */)) {
611                 break;
612             }
613 
614             if (onDrainAudioQueue()) {
615                 uint32_t numFramesPlayed;
616                 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
617                          (status_t)OK);
618 
619                 // Handle AudioTrack race when start is immediately called after flush.
620                 uint32_t numFramesPendingPlayout =
621                     (mNumFramesWritten > numFramesPlayed ?
622                         mNumFramesWritten - numFramesPlayed : 0);
623 
624                 // This is how long the audio sink will have data to
625                 // play back.
626                 int64_t delayUs =
627                     mAudioSink->msecsPerFrame()
628                         * numFramesPendingPlayout * 1000LL;
629                 if (mPlaybackRate > 1.0f) {
630                     delayUs /= mPlaybackRate;
631                 }
632 
633                 // Let's give it more data after about half that time
634                 // has elapsed.
635                 delayUs /= 2;
636                 // check the buffer size to estimate maximum delay permitted.
637                 const int64_t maxDrainDelayUs = std::max(
638                         mAudioSink->getBufferDurationInUs(), (int64_t)500000 /* half second */);
639                 ALOGD_IF(delayUs > maxDrainDelayUs, "postDrainAudioQueue long delay: %lld > %lld",
640                         (long long)delayUs, (long long)maxDrainDelayUs);
641                 Mutex::Autolock autoLock(mLock);
642                 postDrainAudioQueue_l(delayUs);
643             }
644             break;
645         }
646 
647         case kWhatDrainVideoQueue:
648         {
649             int32_t generation;
650             CHECK(msg->findInt32("drainGeneration", &generation));
651             if (generation != getDrainGeneration(false /* audio */)) {
652                 break;
653             }
654 
655             mDrainVideoQueuePending = false;
656 
657             onDrainVideoQueue();
658 
659             postDrainVideoQueue();
660             break;
661         }
662 
663         case kWhatPostDrainVideoQueue:
664         {
665             int32_t generation;
666             CHECK(msg->findInt32("drainGeneration", &generation));
667             if (generation != getDrainGeneration(false /* audio */)) {
668                 break;
669             }
670 
671             mDrainVideoQueuePending = false;
672             postDrainVideoQueue();
673             break;
674         }
675 
676         case kWhatQueueBuffer:
677         {
678             onQueueBuffer(msg);
679             break;
680         }
681 
682         case kWhatQueueEOS:
683         {
684             onQueueEOS(msg);
685             break;
686         }
687 
688         case kWhatEOS:
689         {
690             int32_t generation;
691             CHECK(msg->findInt32("audioEOSGeneration", &generation));
692             if (generation != mAudioEOSGeneration) {
693                 break;
694             }
695             status_t finalResult;
696             CHECK(msg->findInt32("finalResult", &finalResult));
697             notifyEOS(true /* audio */, finalResult);
698             break;
699         }
700 
701         case kWhatConfigPlayback:
702         {
703             sp<AReplyToken> replyID;
704             CHECK(msg->senderAwaitsResponse(&replyID));
705             AudioPlaybackRate rate;
706             readFromAMessage(msg, &rate);
707             status_t err = onConfigPlayback(rate);
708             sp<AMessage> response = new AMessage;
709             response->setInt32("err", err);
710             response->postReply(replyID);
711             break;
712         }
713 
714         case kWhatGetPlaybackSettings:
715         {
716             sp<AReplyToken> replyID;
717             CHECK(msg->senderAwaitsResponse(&replyID));
718             AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT;
719             status_t err = onGetPlaybackSettings(&rate);
720             sp<AMessage> response = new AMessage;
721             if (err == OK) {
722                 writeToAMessage(response, rate);
723             }
724             response->setInt32("err", err);
725             response->postReply(replyID);
726             break;
727         }
728 
729         case kWhatConfigSync:
730         {
731             sp<AReplyToken> replyID;
732             CHECK(msg->senderAwaitsResponse(&replyID));
733             AVSyncSettings sync;
734             float videoFpsHint;
735             readFromAMessage(msg, &sync, &videoFpsHint);
736             status_t err = onConfigSync(sync, videoFpsHint);
737             sp<AMessage> response = new AMessage;
738             response->setInt32("err", err);
739             response->postReply(replyID);
740             break;
741         }
742 
743         case kWhatGetSyncSettings:
744         {
745             sp<AReplyToken> replyID;
746             CHECK(msg->senderAwaitsResponse(&replyID));
747 
748             ALOGV("kWhatGetSyncSettings");
749             AVSyncSettings sync;
750             float videoFps = -1.f;
751             status_t err = onGetSyncSettings(&sync, &videoFps);
752             sp<AMessage> response = new AMessage;
753             if (err == OK) {
754                 writeToAMessage(response, sync, videoFps);
755             }
756             response->setInt32("err", err);
757             response->postReply(replyID);
758             break;
759         }
760 
761         case kWhatFlush:
762         {
763             onFlush(msg);
764             break;
765         }
766 
767         case kWhatDisableOffloadAudio:
768         {
769             onDisableOffloadAudio();
770             break;
771         }
772 
773         case kWhatEnableOffloadAudio:
774         {
775             onEnableOffloadAudio();
776             break;
777         }
778 
779         case kWhatPause:
780         {
781             onPause();
782             break;
783         }
784 
785         case kWhatResume:
786         {
787             onResume();
788             break;
789         }
790 
791         case kWhatSetVideoFrameRate:
792         {
793             float fps;
794             CHECK(msg->findFloat("frame-rate", &fps));
795             onSetVideoFrameRate(fps);
796             break;
797         }
798 
799         case kWhatAudioTearDown:
800         {
801             int32_t reason;
802             CHECK(msg->findInt32("reason", &reason));
803 
804             onAudioTearDown((AudioTearDownReason)reason);
805             break;
806         }
807 
808         case kWhatAudioOffloadPauseTimeout:
809         {
810             int32_t generation;
811             CHECK(msg->findInt32("drainGeneration", &generation));
812             mWakelockTimeoutEvent.updateValues(
813                     uptimeMillis(),
814                     generation,
815                     mAudioOffloadPauseTimeoutGeneration);
816             if (generation != mAudioOffloadPauseTimeoutGeneration) {
817                 break;
818             }
819             ALOGV("Audio Offload tear down due to pause timeout.");
820             onAudioTearDown(kDueToTimeout);
821             sp<AMessage> newMsg = new AMessage(kWhatReleaseWakeLock, this);
822             newMsg->setInt32("drainGeneration", generation);
823             newMsg->post(kWakelockReleaseDelayUs);
824             break;
825         }
826 
827         case kWhatReleaseWakeLock:
828         {
829             int32_t generation;
830             CHECK(msg->findInt32("drainGeneration", &generation));
831             mWakelockReleaseEvent.updateValues(
832                 uptimeMillis(),
833                 generation,
834                 mAudioOffloadPauseTimeoutGeneration);
835             if (generation != mAudioOffloadPauseTimeoutGeneration) {
836                 break;
837             }
838             ALOGV("releasing audio offload pause wakelock.");
839             mWakeLock->release();
840             break;
841         }
842 
843         default:
844             TRESPASS();
845             break;
846     }
847     if (!mSyncFlag.test_and_set()) {
848         Mutex::Autolock syncLock(mSyncLock);
849         ++mSyncCount;
850         mSyncCondition.broadcast();
851     }
852 }
853 
854 void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
855     if (mDrainAudioQueuePending || mSyncQueues || mUseAudioCallback) {
856         return;
857     }
858 
859     if (mAudioQueue.empty()) {
860         return;
861     }
862 
863     // FIXME: if paused, wait until AudioTrack stop() is complete before delivering data.
864     if (mPaused) {
865         const int64_t diffUs = mPauseDrainAudioAllowedUs - ALooper::GetNowUs();
866         if (diffUs > delayUs) {
867             delayUs = diffUs;
868         }
869     }
870 
871     mDrainAudioQueuePending = true;
872     sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, this);
873     msg->setInt32("drainGeneration", mAudioDrainGeneration);
874     msg->post(delayUs);
875 }
876 
877 void NuPlayer::Renderer::prepareForMediaRenderingStart_l() {
878     mAudioRenderingStartGeneration = mAudioDrainGeneration;
879     mVideoRenderingStartGeneration = mVideoDrainGeneration;
880     mRenderingDataDelivered = false;
881 }
882 
883 void NuPlayer::Renderer::notifyIfMediaRenderingStarted_l() {
884     if (mVideoRenderingStartGeneration == mVideoDrainGeneration &&
885         mAudioRenderingStartGeneration == mAudioDrainGeneration) {
886         mRenderingDataDelivered = true;
887         if (mPaused) {
888             return;
889         }
890         mVideoRenderingStartGeneration = -1;
891         mAudioRenderingStartGeneration = -1;
892 
893         sp<AMessage> notify = mNotify->dup();
894         notify->setInt32("what", kWhatMediaRenderingStart);
895         notify->post();
896     }
897 }
898 
899 // static
900 size_t NuPlayer::Renderer::AudioSinkCallback(
901         MediaPlayerBase::AudioSink * /* audioSink */,
902         void *buffer,
903         size_t size,
904         void *cookie,
905         MediaPlayerBase::AudioSink::cb_event_t event) {
906     NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
907 
908     switch (event) {
909         case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
910         {
911             return me->fillAudioBuffer(buffer, size);
912             break;
913         }
914 
915         case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
916         {
917             ALOGV("AudioSink::CB_EVENT_STREAM_END");
918             me->notifyEOSCallback();
919             break;
920         }
921 
922         case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
923         {
924             ALOGV("AudioSink::CB_EVENT_TEAR_DOWN");
925             me->notifyAudioTearDown(kDueToError);
926             break;
927         }
928     }
929 
930     return 0;
931 }
932 
933 void NuPlayer::Renderer::notifyEOSCallback() {
934     Mutex::Autolock autoLock(mLock);
935 
936     if (!mUseAudioCallback) {
937         return;
938     }
939 
940     notifyEOS_l(true /* audio */, ERROR_END_OF_STREAM);
941 }
942 
943 size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
944     Mutex::Autolock autoLock(mLock);
945 
946     if (!mUseAudioCallback) {
947         return 0;
948     }
949 
950     bool hasEOS = false;
951 
952     size_t sizeCopied = 0;
953     bool firstEntry = true;
954     QueueEntry *entry;  // will be valid after while loop if hasEOS is set.
955     while (sizeCopied < size && !mAudioQueue.empty()) {
956         entry = &*mAudioQueue.begin();
957 
958         if (entry->mBuffer == NULL) { // EOS
959             hasEOS = true;
960             mAudioQueue.erase(mAudioQueue.begin());
961             break;
962         }
963 
964         if (firstEntry && entry->mOffset == 0) {
965             firstEntry = false;
966             int64_t mediaTimeUs;
967             CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
968             if (mediaTimeUs < 0) {
969                 ALOGD("fillAudioBuffer: reset negative media time %.2f secs to zero",
970                        mediaTimeUs / 1E6);
971                 mediaTimeUs = 0;
972             }
973             ALOGV("fillAudioBuffer: rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
974             setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
975         }
976 
977         size_t copy = entry->mBuffer->size() - entry->mOffset;
978         size_t sizeRemaining = size - sizeCopied;
979         if (copy > sizeRemaining) {
980             copy = sizeRemaining;
981         }
982 
983         memcpy((char *)buffer + sizeCopied,
984                entry->mBuffer->data() + entry->mOffset,
985                copy);
986 
987         entry->mOffset += copy;
988         if (entry->mOffset == entry->mBuffer->size()) {
989             entry->mNotifyConsumed->post();
990             mAudioQueue.erase(mAudioQueue.begin());
991             entry = NULL;
992         }
993         sizeCopied += copy;
994 
995         notifyIfMediaRenderingStarted_l();
996     }
997 
998     if (mAudioFirstAnchorTimeMediaUs >= 0) {
999         int64_t nowUs = ALooper::GetNowUs();
1000         int64_t nowMediaUs =
1001             mAudioFirstAnchorTimeMediaUs + mAudioSink->getPlayedOutDurationUs(nowUs);
1002         // we don't know how much data we are queueing for offloaded tracks.
1003         mMediaClock->updateAnchor(nowMediaUs, nowUs, INT64_MAX);
1004     }
1005 
1006     // for non-offloaded audio, we need to compute the frames written because
1007     // there is no EVENT_STREAM_END notification. The frames written give
1008     // an estimate of the pending played-out duration.
1009     if (!offloadingAudio()) {
1010         mNumFramesWritten += sizeCopied / mAudioSink->frameSize();
1011     }
1012 
1013     if (hasEOS) {
1014         (new AMessage(kWhatStopAudioSink, this))->post();
1015         // As there is currently no EVENT_STREAM_END callback notification for
1016         // non-offloaded audio tracks, we need to post the EOS ourselves.
1017         if (!offloadingAudio()) {
1018             int64_t postEOSDelayUs = 0;
1019             if (mAudioSink->needsTrailingPadding()) {
1020                 postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
1021             }
1022             ALOGV("fillAudioBuffer: notifyEOS_l "
1023                     "mNumFramesWritten:%u  finalResult:%d  postEOSDelay:%lld",
1024                     mNumFramesWritten, entry->mFinalResult, (long long)postEOSDelayUs);
1025             notifyEOS_l(true /* audio */, entry->mFinalResult, postEOSDelayUs);
1026         }
1027     }
1028     return sizeCopied;
1029 }
1030 
1031 void NuPlayer::Renderer::drainAudioQueueUntilLastEOS() {
1032     List<QueueEntry>::iterator it = mAudioQueue.begin(), itEOS = it;
1033     bool foundEOS = false;
1034     while (it != mAudioQueue.end()) {
1035         int32_t eos;
1036         QueueEntry *entry = &*it++;
1037         if ((entry->mBuffer == nullptr && entry->mNotifyConsumed == nullptr)
1038                 || (entry->mNotifyConsumed->findInt32("eos", &eos) && eos != 0)) {
1039             itEOS = it;
1040             foundEOS = true;
1041         }
1042     }
1043 
1044     if (foundEOS) {
1045         // post all replies before EOS and drop the samples
1046         for (it = mAudioQueue.begin(); it != itEOS; it++) {
1047             if (it->mBuffer == nullptr) {
1048                 if (it->mNotifyConsumed == nullptr) {
1049                     // delay doesn't matter as we don't even have an AudioTrack
1050                     notifyEOS(true /* audio */, it->mFinalResult);
1051                 } else {
1052                     // TAG for re-opening audio sink.
1053                     onChangeAudioFormat(it->mMeta, it->mNotifyConsumed);
1054                 }
1055             } else {
1056                 it->mNotifyConsumed->post();
1057             }
1058         }
1059         mAudioQueue.erase(mAudioQueue.begin(), itEOS);
1060     }
1061 }
1062 
1063 bool NuPlayer::Renderer::onDrainAudioQueue() {
1064     // do not drain audio during teardown as queued buffers may be invalid.
1065     if (mAudioTornDown) {
1066         return false;
1067     }
1068     // TODO: This call to getPosition checks if AudioTrack has been created
1069     // in AudioSink before draining audio. If AudioTrack doesn't exist, then
1070     // CHECKs on getPosition will fail.
1071     // We still need to figure out why AudioTrack is not created when
1072     // this function is called. One possible reason could be leftover
1073     // audio. Another possible place is to check whether decoder
1074     // has received INFO_FORMAT_CHANGED as the first buffer since
1075     // AudioSink is opened there, and possible interactions with flush
1076     // immediately after start. Investigate error message
1077     // "vorbis_dsp_synthesis returned -135", along with RTSP.
1078     uint32_t numFramesPlayed;
1079     if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
1080         // When getPosition fails, renderer will not reschedule the draining
1081         // unless new samples are queued.
1082         // If we have pending EOS (or "eos" marker for discontinuities), we need
1083         // to post these now as NuPlayerDecoder might be waiting for it.
1084         drainAudioQueueUntilLastEOS();
1085 
1086         ALOGW("onDrainAudioQueue(): audio sink is not ready");
1087         return false;
1088     }
1089 
1090 #if 0
1091     ssize_t numFramesAvailableToWrite =
1092         mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
1093 
1094     if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
1095         ALOGI("audio sink underrun");
1096     } else {
1097         ALOGV("audio queue has %d frames left to play",
1098              mAudioSink->frameCount() - numFramesAvailableToWrite);
1099     }
1100 #endif
1101 
1102     uint32_t prevFramesWritten = mNumFramesWritten;
1103     while (!mAudioQueue.empty()) {
1104         QueueEntry *entry = &*mAudioQueue.begin();
1105 
1106         if (entry->mBuffer == NULL) {
1107             if (entry->mNotifyConsumed != nullptr) {
1108                 // TAG for re-open audio sink.
1109                 onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
1110                 mAudioQueue.erase(mAudioQueue.begin());
1111                 continue;
1112             }
1113 
1114             // EOS
1115             if (mPaused) {
1116                 // Do not notify EOS when paused.
1117                 // This is needed to avoid switching to the next clip while paused.
1118                 ALOGV("onDrainAudioQueue(): Do not notify EOS when paused");
1119                 return false;
1120             }
1121 
1122             int64_t postEOSDelayUs = 0;
1123             if (mAudioSink->needsTrailingPadding()) {
1124                 postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
1125             }
1126             notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
1127             mLastAudioMediaTimeUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
1128 
1129             mAudioQueue.erase(mAudioQueue.begin());
1130             entry = NULL;
1131             if (mAudioSink->needsTrailingPadding()) {
1132                 // If we're not in gapless playback (i.e. through setNextPlayer), we
1133                 // need to stop the track here, because that will play out the last
1134                 // little bit at the end of the file. Otherwise short files won't play.
1135                 mAudioSink->stop();
1136                 mNumFramesWritten = 0;
1137             }
1138             return false;
1139         }
1140 
1141         mLastAudioBufferDrained = entry->mBufferOrdinal;
1142 
1143         // ignore 0-sized buffer which could be EOS marker with no data
1144         if (entry->mOffset == 0 && entry->mBuffer->size() > 0) {
1145             int64_t mediaTimeUs;
1146             CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
1147             ALOGV("onDrainAudioQueue: rendering audio at media time %.2f secs",
1148                     mediaTimeUs / 1E6);
1149             onNewAudioMediaTime(mediaTimeUs);
1150         }
1151 
1152         size_t copy = entry->mBuffer->size() - entry->mOffset;
1153 
1154         ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset,
1155                                             copy, false /* blocking */);
1156         if (written < 0) {
1157             // An error in AudioSink write. Perhaps the AudioSink was not properly opened.
1158             if (written == WOULD_BLOCK) {
1159                 ALOGV("AudioSink write would block when writing %zu bytes", copy);
1160             } else {
1161                 ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
1162                 // This can only happen when AudioSink was opened with doNotReconnect flag set to
1163                 // true, in which case the NuPlayer will handle the reconnect.
1164                 notifyAudioTearDown(kDueToError);
1165             }
1166             break;
1167         }
1168 
1169         entry->mOffset += written;
1170         size_t remainder = entry->mBuffer->size() - entry->mOffset;
1171         if ((ssize_t)remainder < mAudioSink->frameSize()) {
1172             if (remainder > 0) {
1173                 ALOGW("Corrupted audio buffer has fractional frames, discarding %zu bytes.",
1174                         remainder);
1175                 entry->mOffset += remainder;
1176                 copy -= remainder;
1177             }
1178 
1179             entry->mNotifyConsumed->post();
1180             mAudioQueue.erase(mAudioQueue.begin());
1181 
1182             entry = NULL;
1183         }
1184 
1185         size_t copiedFrames = written / mAudioSink->frameSize();
1186         mNumFramesWritten += copiedFrames;
1187 
1188         {
1189             Mutex::Autolock autoLock(mLock);
1190             int64_t maxTimeMedia;
1191             maxTimeMedia =
1192                 mAnchorTimeMediaUs +
1193                         (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
1194                                 * 1000LL * mAudioSink->msecsPerFrame());
1195             mMediaClock->updateMaxTimeMedia(maxTimeMedia);
1196 
1197             notifyIfMediaRenderingStarted_l();
1198         }
1199 
1200         if (written != (ssize_t)copy) {
1201             // A short count was received from AudioSink::write()
1202             //
1203             // AudioSink write is called in non-blocking mode.
1204             // It may return with a short count when:
1205             //
1206             // 1) Size to be copied is not a multiple of the frame size. Fractional frames are
1207             //    discarded.
1208             // 2) The data to be copied exceeds the available buffer in AudioSink.
1209             // 3) An error occurs and data has been partially copied to the buffer in AudioSink.
1210             // 4) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.
1211 
1212             // (Case 1)
1213             // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
1214             // needs to fail, as we should not carry over fractional frames between calls.
1215             CHECK_EQ(copy % mAudioSink->frameSize(), 0u);
1216 
1217             // (Case 2, 3, 4)
1218             // Return early to the caller.
1219             // Beware of calling immediately again as this may busy-loop if you are not careful.
1220             ALOGV("AudioSink write short frame count %zd < %zu", written, copy);
1221             break;
1222         }
1223     }
1224 
1225     // calculate whether we need to reschedule another write.
1226     bool reschedule = !mAudioQueue.empty()
1227             && (!mPaused
1228                 || prevFramesWritten != mNumFramesWritten); // permit pause to fill buffers
1229     //ALOGD("reschedule:%d  empty:%d  mPaused:%d  prevFramesWritten:%u  mNumFramesWritten:%u",
1230     //        reschedule, mAudioQueue.empty(), mPaused, prevFramesWritten, mNumFramesWritten);
1231     return reschedule;
1232 }
1233 
1234 int64_t NuPlayer::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) {
1235     int32_t sampleRate = offloadingAudio() ?
1236             mCurrentOffloadInfo.sample_rate : mCurrentPcmInfo.mSampleRate;
1237     if (sampleRate == 0) {
1238         ALOGE("sampleRate is 0 in %s mode", offloadingAudio() ? "offload" : "non-offload");
1239         return 0;
1240     }
1241 
1242     return (int64_t)(numFrames * 1000000LL / sampleRate);
1243 }
1244 
1245 // Calculate duration of pending samples if played at normal rate (i.e., 1.0).
1246 int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
1247     int64_t writtenAudioDurationUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
1248     if (mUseVirtualAudioSink) {
1249         int64_t nowUs = ALooper::GetNowUs();
1250         int64_t mediaUs;
1251         if (mMediaClock->getMediaTime(nowUs, &mediaUs) != OK) {
1252             return 0LL;
1253         } else {
1254             return writtenAudioDurationUs - (mediaUs - mAudioFirstAnchorTimeMediaUs);
1255         }
1256     }
1257 
1258     const int64_t audioSinkPlayedUs = mAudioSink->getPlayedOutDurationUs(nowUs);
1259     int64_t pendingUs = writtenAudioDurationUs - audioSinkPlayedUs;
1260     if (pendingUs < 0) {
1261         // This shouldn't happen unless the timestamp is stale.
1262         ALOGW("%s: pendingUs %lld < 0, clamping to zero, potential resume after pause "
1263                 "writtenAudioDurationUs: %lld, audioSinkPlayedUs: %lld",
1264                 __func__, (long long)pendingUs,
1265                 (long long)writtenAudioDurationUs, (long long)audioSinkPlayedUs);
1266         pendingUs = 0;
1267     }
1268     return pendingUs;
1269 }
1270 
1271 int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
1272     int64_t realUs;
1273     if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) {
1274         // If we failed to get the current position, e.g. because the audio clock is
1275         // not ready, then just play out video immediately without delay.
1276         return nowUs;
1277     }
1278     return realUs;
1279 }
1280 
1281 void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
1282     Mutex::Autolock autoLock(mLock);
1283     // TRICKY: vorbis decoder generates multiple frames with the same
1284     // timestamp, so only update on the first frame with a given timestamp
1285     if (mediaTimeUs == mAnchorTimeMediaUs) {
1286         return;
1287     }
1288     setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
1289 
1290     // mNextAudioClockUpdateTimeUs is -1 if we're waiting for audio sink to start
1291     if (mNextAudioClockUpdateTimeUs == -1) {
1292         AudioTimestamp ts;
1293         if (mAudioSink->getTimestamp(ts) == OK && ts.mPosition > 0) {
1294             mNextAudioClockUpdateTimeUs = 0; // start our clock updates
1295         }
1296     }
1297     int64_t nowUs = ALooper::GetNowUs();
1298     if (mNextAudioClockUpdateTimeUs >= 0) {
1299         if (nowUs >= mNextAudioClockUpdateTimeUs) {
1300             int64_t nowMediaUs = mediaTimeUs - getPendingAudioPlayoutDurationUs(nowUs);
1301             mMediaClock->updateAnchor(nowMediaUs, nowUs, mediaTimeUs);
1302             mUseVirtualAudioSink = false;
1303             mNextAudioClockUpdateTimeUs = nowUs + kMinimumAudioClockUpdatePeriodUs;
1304         }
1305     } else {
1306         int64_t unused;
1307         if ((mMediaClock->getMediaTime(nowUs, &unused) != OK)
1308                 && (getDurationUsIfPlayedAtSampleRate(mNumFramesWritten)
1309                         > kMaxAllowedAudioSinkDelayUs)) {
1310             // Enough data has been sent to AudioSink, but AudioSink has not rendered
1311             // any data yet. Something is wrong with AudioSink, e.g., the device is not
1312             // connected to audio out.
1313             // Switch to system clock. This essentially creates a virtual AudioSink with
1314             // initial latency of getDurationUsIfPlayedAtSampleRate(mNumFramesWritten).
1315             // This virtual AudioSink renders audio data starting from the very first sample
1316             // and it's paced by system clock.
1317             ALOGW("AudioSink stuck. ARE YOU CONNECTED TO AUDIO OUT? Switching to system clock.");
1318             mMediaClock->updateAnchor(mAudioFirstAnchorTimeMediaUs, nowUs, mediaTimeUs);
1319             mUseVirtualAudioSink = true;
1320         }
1321     }
1322     mAnchorNumFramesWritten = mNumFramesWritten;
1323     mAnchorTimeMediaUs = mediaTimeUs;
1324 }
1325 
1326 // Called without mLock acquired.
1327 void NuPlayer::Renderer::postDrainVideoQueue() {
1328     if (mDrainVideoQueuePending
1329             || getSyncQueues()
1330             || (mPaused && mVideoSampleReceived)) {
1331         return;
1332     }
1333 
1334     if (mVideoQueue.empty()) {
1335         return;
1336     }
1337 
1338     QueueEntry &entry = *mVideoQueue.begin();
1339 
1340     sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this);
1341     msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));
1342 
1343     if (entry.mBuffer == NULL) {
1344         // EOS doesn't carry a timestamp.
1345         msg->post();
1346         mDrainVideoQueuePending = true;
1347         return;
1348     }
1349 
1350     int64_t nowUs = ALooper::GetNowUs();
1351     if (mFlags & FLAG_REAL_TIME) {
1352         int64_t realTimeUs;
1353         CHECK(entry.mBuffer->meta()->findInt64("timeUs", &realTimeUs));
1354 
1355         realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
1356 
1357         int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
1358 
1359         int64_t delayUs = realTimeUs - nowUs;
1360 
1361         ALOGW_IF(delayUs > 500000, "unusually high delayUs: %lld", (long long)delayUs);
1362         // post 2 display refreshes before rendering is due
1363         msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
1364 
1365         mDrainVideoQueuePending = true;
1366         return;
1367     }
1368 
1369     int64_t mediaTimeUs;
1370     CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
1371 
1372     {
1373         Mutex::Autolock autoLock(mLock);
1374         if (mAnchorTimeMediaUs < 0) {
1375             mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs);
1376             mAnchorTimeMediaUs = mediaTimeUs;
1377         }
1378     }
1379     mNextVideoTimeMediaUs = mediaTimeUs;
1380     if (!mHasAudio) {
1381         // smooth out videos >= 10fps
1382         mMediaClock->updateMaxTimeMedia(mediaTimeUs + kDefaultVideoFrameIntervalUs);
1383     }
1384 
1385     if (!mVideoSampleReceived || mediaTimeUs < mAudioFirstAnchorTimeMediaUs) {
1386         msg->post();
1387     } else {
1388         int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
1389 
1390         // post 2 display refreshes before rendering is due
1391         mMediaClock->addTimer(msg, mediaTimeUs, -twoVsyncsUs);
1392     }
1393 
1394     mDrainVideoQueuePending = true;
1395 }
1396 
1397 void NuPlayer::Renderer::onDrainVideoQueue() {
1398     if (mVideoQueue.empty()) {
1399         return;
1400     }
1401 
1402     QueueEntry *entry = &*mVideoQueue.begin();
1403 
1404     if (entry->mBuffer == NULL) {
1405         // EOS
1406 
1407         notifyEOS(false /* audio */, entry->mFinalResult);
1408 
1409         mVideoQueue.erase(mVideoQueue.begin());
1410         entry = NULL;
1411 
1412         setVideoLateByUs(0);
1413         return;
1414     }
1415 
1416     int64_t nowUs = ALooper::GetNowUs();
1417     int64_t realTimeUs;
1418     int64_t mediaTimeUs = -1;
1419     if (mFlags & FLAG_REAL_TIME) {
1420         CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
1421     } else {
1422         CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
1423 
1424         realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
1425     }
1426     realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
1427 
1428     bool tooLate = false;
1429 
1430     if (!mPaused) {
1431         setVideoLateByUs(nowUs - realTimeUs);
1432         tooLate = (mVideoLateByUs > 40000);
1433 
1434         if (tooLate) {
1435             ALOGV("video late by %lld us (%.2f secs)",
1436                  (long long)mVideoLateByUs, mVideoLateByUs / 1E6);
1437         } else {
1438             int64_t mediaUs = 0;
1439             mMediaClock->getMediaTime(realTimeUs, &mediaUs);
1440             ALOGV("rendering video at media time %.2f secs",
1441                     (mFlags & FLAG_REAL_TIME ? realTimeUs :
1442                     mediaUs) / 1E6);
1443 
1444             if (!(mFlags & FLAG_REAL_TIME)
1445                     && mLastAudioMediaTimeUs != -1
1446                     && mediaTimeUs > mLastAudioMediaTimeUs) {
1447                 // If audio ends before video, video continues to drive media clock.
1448                 // Also smooth out videos >= 10fps.
1449                 mMediaClock->updateMaxTimeMedia(mediaTimeUs + kDefaultVideoFrameIntervalUs);
1450             }
1451         }
1452     } else {
1453         setVideoLateByUs(0);
1454         if (!mVideoSampleReceived && !mHasAudio) {
1455             // This ensures that the first frame after a flush won't be used as an anchor
1456             // while the renderer is paused, because resume can happen at any time after a seek.
1457             clearAnchorTime();
1458         }
1459     }
1460 
1461     // Always render the first video frame while keeping stats on A/V sync.
1462     if (!mVideoSampleReceived) {
1463         realTimeUs = nowUs;
1464         tooLate = false;
1465     }
1466 
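    // Reply to the decoder: "timestampNs" is the target display time in nanoseconds and
    // "render" tells it whether this buffer should actually be shown.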
1467     entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000LL);
1468     entry->mNotifyConsumed->setInt32("render", !tooLate);
1469     entry->mNotifyConsumed->post();
1470     mVideoQueue.erase(mVideoQueue.begin());
1471     entry = NULL;
1472 
1473     mVideoSampleReceived = true;
1474 
1475     if (!mPaused) {
1476         if (!mVideoRenderingStarted) {
1477             mVideoRenderingStarted = true;
1478             notifyVideoRenderingStart();
1479         }
1480         Mutex::Autolock autoLock(mLock);
1481         notifyIfMediaRenderingStarted_l();
1482     }
1483 }
1484 
1485 void NuPlayer::Renderer::notifyVideoRenderingStart() {
1486     sp<AMessage> notify = mNotify->dup();
1487     notify->setInt32("what", kWhatVideoRenderingStart);
1488     notify->post();
1489 }
1490 
1491 void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
1492     Mutex::Autolock autoLock(mLock);
1493     notifyEOS_l(audio, finalResult, delayUs);
1494 }
1495 
1496 void NuPlayer::Renderer::notifyEOS_l(bool audio, status_t finalResult, int64_t delayUs) {
1497     if (audio && delayUs > 0) {
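        // A positive delay typically means audio is still draining out of the AudioSink, so
        // defer the EOS notification rather than delivering it early. The generation stamped
        // below lets a flush (which bumps mAudioEOSGeneration) invalidate this pending message.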
1498         sp<AMessage> msg = new AMessage(kWhatEOS, this);
1499         msg->setInt32("audioEOSGeneration", mAudioEOSGeneration);
1500         msg->setInt32("finalResult", finalResult);
1501         msg->post(delayUs);
1502         return;
1503     }
1504     sp<AMessage> notify = mNotify->dup();
1505     notify->setInt32("what", kWhatEOS);
1506     notify->setInt32("audio", static_cast<int32_t>(audio));
1507     notify->setInt32("finalResult", finalResult);
1508     notify->post(delayUs);
1509 
1510     if (audio) {
1511         // Video might outlive audio. Clear the anchor to enable the video-only case.
1512         mAnchorTimeMediaUs = -1;
1513         mHasAudio = false;
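        // Hand the clock over to video: if the media clock still reports a valid time, just
        // extend its max media time up to the next queued video frame; otherwise re-anchor it
        // at that frame so video-only playback can continue.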
1514         if (mNextVideoTimeMediaUs >= 0) {
1515             int64_t mediaUs = 0;
1516             int64_t nowUs = ALooper::GetNowUs();
1517             status_t result = mMediaClock->getMediaTime(nowUs, &mediaUs);
1518             if (result == OK) {
1519                 if (mNextVideoTimeMediaUs > mediaUs) {
1520                     mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
1521                 }
1522             } else {
1523                 mMediaClock->updateAnchor(
1524                         mNextVideoTimeMediaUs, nowUs,
1525                         mNextVideoTimeMediaUs + kDefaultVideoFrameIntervalUs);
1526             }
1527         }
1528     }
1529 }
1530 
1531 void NuPlayer::Renderer::notifyAudioTearDown(AudioTearDownReason reason) {
1532     sp<AMessage> msg = new AMessage(kWhatAudioTearDown, this);
1533     msg->setInt32("reason", reason);
1534     msg->post();
1535 }
1536 
1537 void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
1538     int32_t audio;
1539     CHECK(msg->findInt32("audio", &audio));
1540 
1541     if (dropBufferIfStale(audio, msg)) {
1542         return;
1543     }
1544 
1545     if (audio) {
1546         mHasAudio = true;
1547     } else {
1548         mHasVideo = true;
1549     }
1550 
1551     if (mHasVideo) {
1552         if (mVideoScheduler == NULL) {
1553             mVideoScheduler = new VideoFrameScheduler();
1554             mVideoScheduler->init();
1555         }
1556     }
1557 
1558     sp<RefBase> obj;
1559     CHECK(msg->findObject("buffer", &obj));
1560     sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
1561 
1562     sp<AMessage> notifyConsumed;
1563     CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));
1564 
1565     QueueEntry entry;
1566     entry.mBuffer = buffer;
1567     entry.mNotifyConsumed = notifyConsumed;
1568     entry.mOffset = 0;
1569     entry.mFinalResult = OK;
1570     entry.mBufferOrdinal = ++mTotalBuffersQueued;
1571 
1572     if (audio) {
1573         Mutex::Autolock autoLock(mLock);
1574         mAudioQueue.push_back(entry);
1575         postDrainAudioQueue_l();
1576     } else {
1577         mVideoQueue.push_back(entry);
1578         postDrainVideoQueue();
1579     }
1580 
1581     Mutex::Autolock autoLock(mLock);
1582     if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
1583         return;
1584     }
1585 
1586     sp<MediaCodecBuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
1587     sp<MediaCodecBuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;
1588 
1589     if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
1590         // EOS signalled on either queue.
1591         syncQueuesDone_l();
1592         return;
1593     }
1594 
1595     int64_t firstAudioTimeUs;
1596     int64_t firstVideoTimeUs;
1597     CHECK(firstAudioBuffer->meta()
1598             ->findInt64("timeUs", &firstAudioTimeUs));
1599     CHECK(firstVideoBuffer->meta()
1600             ->findInt64("timeUs", &firstVideoTimeUs));
1601 
1602     int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
1603 
1604     ALOGV("queueDiff = %.2f secs", diff / 1E6);
1605 
1606     if (diff > 100000LL) {
1607         // Audio data starts more than 0.1 secs before video.
1608         // Drop some audio.
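        // For example, firstAudioTimeUs = 0 with firstVideoTimeUs = 250000 gives a diff of
        // 0.25 secs, so the first audio buffer is returned to the decoder unrendered; this
        // repeats as new buffers arrive until the two queues are within 0.1 secs of each other.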
1609 
1610         (*mAudioQueue.begin()).mNotifyConsumed->post();
1611         mAudioQueue.erase(mAudioQueue.begin());
1612         return;
1613     }
1614 
1615     syncQueuesDone_l();
1616 }
1617 
1618 void NuPlayer::Renderer::syncQueuesDone_l() {
1619     if (!mSyncQueues) {
1620         return;
1621     }
1622 
1623     mSyncQueues = false;
1624 
1625     if (!mAudioQueue.empty()) {
1626         postDrainAudioQueue_l();
1627     }
1628 
1629     if (!mVideoQueue.empty()) {
1630         mLock.unlock();
1631         postDrainVideoQueue();
1632         mLock.lock();
1633     }
1634 }
1635 
1636 void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
1637     int32_t audio;
1638     CHECK(msg->findInt32("audio", &audio));
1639 
1640     if (dropBufferIfStale(audio, msg)) {
1641         return;
1642     }
1643 
1644     int32_t finalResult;
1645     CHECK(msg->findInt32("finalResult", &finalResult));
1646 
1647     QueueEntry entry;
1648     entry.mOffset = 0;
1649     entry.mFinalResult = finalResult;
1650 
1651     if (audio) {
1652         Mutex::Autolock autoLock(mLock);
1653         if (mAudioQueue.empty() && mSyncQueues) {
1654             syncQueuesDone_l();
1655         }
1656         mAudioQueue.push_back(entry);
1657         postDrainAudioQueue_l();
1658     } else {
1659         if (mVideoQueue.empty() && getSyncQueues()) {
1660             Mutex::Autolock autoLock(mLock);
1661             syncQueuesDone_l();
1662         }
1663         mVideoQueue.push_back(entry);
1664         postDrainVideoQueue();
1665     }
1666 }
1667 
1668 void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
1669     int32_t audio, notifyComplete;
1670     CHECK(msg->findInt32("audio", &audio));
1671 
1672     {
1673         Mutex::Autolock autoLock(mLock);
1674         if (audio) {
1675             notifyComplete = mNotifyCompleteAudio;
1676             mNotifyCompleteAudio = false;
1677             mLastAudioMediaTimeUs = -1;
1678 
1679             mHasAudio = false;
1680             if (mNextVideoTimeMediaUs >= 0) {
1681                 int64_t nowUs = ALooper::GetNowUs();
1682                 mMediaClock->updateAnchor(
1683                         mNextVideoTimeMediaUs, nowUs,
1684                         mNextVideoTimeMediaUs + kDefaultVideoFrameIntervalUs);
1685             }
1686         } else {
1687             notifyComplete = mNotifyCompleteVideo;
1688             mNotifyCompleteVideo = false;
1689         }
1690 
1691         // If we're currently syncing the queues, i.e. dropping audio while
1692         // aligning the first audio/video buffer times and only one of the
1693         // two queues has data, we may starve that queue by not requesting
1694         // more buffers from the decoder. If the other source then encounters
1695         // a discontinuity that leads to flushing, we'll never find the
1696         // corresponding discontinuity on the other queue.
1697         // Therefore we'll stop syncing the queues if at least one of them
1698         // is flushed.
1699         syncQueuesDone_l();
1700     }
1701     clearAnchorTime();
1702 
1703     ALOGV("flushing %s", audio ? "audio" : "video");
1704     if (audio) {
1705         {
1706             Mutex::Autolock autoLock(mLock);
1707             flushQueue(&mAudioQueue);
1708 
1709             ++mAudioDrainGeneration;
1710             ++mAudioEOSGeneration;
1711             prepareForMediaRenderingStart_l();
1712 
1713             // the frame count will be reset after flush.
1714             clearAudioFirstAnchorTime_l();
1715         }
1716 
1717         mDrainAudioQueuePending = false;
1718 
1719         mAudioSink->pause();
1720         mAudioSink->flush();
1721         if (!offloadingAudio()) {
1722             // Call stop() to signal to the AudioSink to completely fill the
1723             // internal buffer before resuming playback.
1724             // FIXME: this is ignored after flush().
1725             mAudioSink->stop();
1726             mNumFramesWritten = 0;
1727         }
1728         if (!mPaused) {
1729             mAudioSink->start();
1730         }
1731         mNextAudioClockUpdateTimeUs = -1;
1732     } else {
1733         flushQueue(&mVideoQueue);
1734 
1735         mDrainVideoQueuePending = false;
1736 
1737         if (mVideoScheduler != NULL) {
1738             mVideoScheduler->restart();
1739         }
1740 
1741         Mutex::Autolock autoLock(mLock);
1742         ++mVideoDrainGeneration;
1743         prepareForMediaRenderingStart_l();
1744     }
1745 
1746     mVideoSampleReceived = false;
1747 
1748     if (notifyComplete) {
1749         notifyFlushComplete(audio);
1750     }
1751 }
1752 
1753 void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
1754     while (!queue->empty()) {
1755         QueueEntry *entry = &*queue->begin();
1756 
1757         if (entry->mBuffer != NULL) {
1758             entry->mNotifyConsumed->post();
1759         } else if (entry->mNotifyConsumed != nullptr) {
1760             // Do we need to open the audio sink now?
1761             onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
1762         }
1763 
1764         queue->erase(queue->begin());
1765         entry = NULL;
1766     }
1767 }
1768 
1769 void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
1770     sp<AMessage> notify = mNotify->dup();
1771     notify->setInt32("what", kWhatFlushComplete);
1772     notify->setInt32("audio", static_cast<int32_t>(audio));
1773     notify->post();
1774 }
1775 
1776 bool NuPlayer::Renderer::dropBufferIfStale(
1777         bool audio, const sp<AMessage> &msg) {
1778     int32_t queueGeneration;
1779     CHECK(msg->findInt32("queueGeneration", &queueGeneration));
1780 
1781     if (queueGeneration == getQueueGeneration(audio)) {
1782         return false;
1783     }
1784 
1785     sp<AMessage> notifyConsumed;
1786     if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
1787         notifyConsumed->post();
1788     }
1789 
1790     return true;
1791 }
1792 
1793 void NuPlayer::Renderer::onAudioSinkChanged() {
1794     if (offloadingAudio()) {
1795         return;
1796     }
1797     CHECK(!mDrainAudioQueuePending);
1798     mNumFramesWritten = 0;
1799     mAnchorNumFramesWritten = -1;
1800     uint32_t written;
1801     if (mAudioSink->getFramesWritten(&written) == OK) {
1802         mNumFramesWritten = written;
1803     }
1804 }
1805 
1806 void NuPlayer::Renderer::onDisableOffloadAudio() {
1807     Mutex::Autolock autoLock(mLock);
1808     mFlags &= ~FLAG_OFFLOAD_AUDIO;
1809     ++mAudioDrainGeneration;
1810     if (mAudioRenderingStartGeneration != -1) {
1811         prepareForMediaRenderingStart_l();
1812         // PauseTimeout is applied to offload mode only. Cancel pending timer.
1813         cancelAudioOffloadPauseTimeout();
1814     }
1815 }
1816 
1817 void NuPlayer::Renderer::onEnableOffloadAudio() {
1818     Mutex::Autolock autoLock(mLock);
1819     mFlags |= FLAG_OFFLOAD_AUDIO;
1820     ++mAudioDrainGeneration;
1821     if (mAudioRenderingStartGeneration != -1) {
1822         prepareForMediaRenderingStart_l();
1823     }
1824 }
1825 
1826 void NuPlayer::Renderer::onPause() {
1827     if (mPaused) {
1828         return;
1829     }
1830 
1831     startAudioOffloadPauseTimeout();
1832 
1833     {
1834         Mutex::Autolock autoLock(mLock);
1835         // We do not increment the audio drain generation so that the audio buffer keeps filling during pause.
1836         ++mVideoDrainGeneration;
1837         prepareForMediaRenderingStart_l();
1838         mPaused = true;
1839         mMediaClock->setPlaybackRate(0.0);
1840     }
1841 
1842     mDrainAudioQueuePending = false;
1843     mDrainVideoQueuePending = false;
1844 
1845     // Note: audio data may not have been decoded, and the AudioSink may not be opened.
1846     mAudioSink->pause();
1847 
1848     ALOGV("now paused audio queue has %zu entries, video has %zu entries",
1849           mAudioQueue.size(), mVideoQueue.size());
1850 }
1851 
1852 void NuPlayer::Renderer::onResume() {
1853     if (!mPaused) {
1854         return;
1855     }
1856 
1857     // Note: audio data may not have been decoded, and the AudioSink may not be opened.
1858     cancelAudioOffloadPauseTimeout();
1859     if (mAudioSink->ready()) {
1860         status_t err = mAudioSink->start();
1861         if (err != OK) {
1862             ALOGE("cannot start AudioSink err %d", err);
1863             notifyAudioTearDown(kDueToError);
1864         }
1865     }
1866 
1867     {
1868         Mutex::Autolock autoLock(mLock);
1869         mPaused = false;
1870         // rendering started message may have been delayed if we were paused.
1871         if (mRenderingDataDelivered) {
1872             notifyIfMediaRenderingStarted_l();
1873         }
1874         // Configure the AudioSink, as we did not do it when pausing.
1875         if (mAudioSink != NULL && mAudioSink->ready()) {
1876             mAudioSink->setPlaybackRate(mPlaybackSettings);
1877         }
1878 
1879         mMediaClock->setPlaybackRate(mPlaybackRate);
1880 
1881         if (!mAudioQueue.empty()) {
1882             postDrainAudioQueue_l();
1883         }
1884     }
1885 
1886     if (!mVideoQueue.empty()) {
1887         postDrainVideoQueue();
1888     }
1889 }
1890 
1891 void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
1892     if (mVideoScheduler == NULL) {
1893         mVideoScheduler = new VideoFrameScheduler();
1894     }
1895     mVideoScheduler->init(fps);
1896 }
1897 
1898 int32_t NuPlayer::Renderer::getQueueGeneration(bool audio) {
1899     Mutex::Autolock autoLock(mLock);
1900     return (audio ? mAudioQueueGeneration : mVideoQueueGeneration);
1901 }
1902 
1903 int32_t NuPlayer::Renderer::getDrainGeneration(bool audio) {
1904     Mutex::Autolock autoLock(mLock);
1905     return (audio ? mAudioDrainGeneration : mVideoDrainGeneration);
1906 }
1907 
1908 bool NuPlayer::Renderer::getSyncQueues() {
1909     Mutex::Autolock autoLock(mLock);
1910     return mSyncQueues;
1911 }
1912 
1913 void NuPlayer::Renderer::onAudioTearDown(AudioTearDownReason reason) {
1914     if (mAudioTornDown) {
1915         return;
1916     }
1917 
1918     // TimeoutWhenPaused is only for offload mode.
1919     if (reason == kDueToTimeout && !offloadingAudio()) {
1920         return;
1921     }
1922 
1923     mAudioTornDown = true;
1924 
1925     int64_t currentPositionUs;
1926     sp<AMessage> notify = mNotify->dup();
1927     if (getCurrentPosition(&currentPositionUs) == OK) {
1928         notify->setInt64("positionUs", currentPositionUs);
1929     }
1930 
1931     mAudioSink->stop();
1932     mAudioSink->flush();
1933 
1934     notify->setInt32("what", kWhatAudioTearDown);
1935     notify->setInt32("reason", reason);
1936     notify->post();
1937 }
1938 
1939 void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
1940     if (offloadingAudio()) {
1941         mWakeLock->acquire();
1942         mWakelockAcquireEvent.updateValues(uptimeMillis(),
1943                                            mAudioOffloadPauseTimeoutGeneration,
1944                                            mAudioOffloadPauseTimeoutGeneration);
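        // The timeout message carries the current generation; cancelAudioOffloadPauseTimeout()
        // increments it, so a resume before the timeout fires renders this message stale.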
1945         sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, this);
1946         msg->setInt32("drainGeneration", mAudioOffloadPauseTimeoutGeneration);
1947         msg->post(kOffloadPauseMaxUs);
1948     }
1949 }
1950 
1951 void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
1952     // We may have called startAudioOffloadPauseTimeout() without
1953     // the AudioSink open and with offloadingAudio enabled.
1954     //
1955     // When we cancel, it may be that offloadingAudio is subsequently disabled, so regardless
1956     // we always release the wakelock and increment the pause timeout generation.
1957     //
1958     // Note: The acquired wakelock prevents the device from suspending
1959     // immediately after offload pause (in case a resume happens shortly thereafter).
1960     mWakeLock->release(true);
1961     mWakelockCancelEvent.updateValues(uptimeMillis(),
1962                                       mAudioOffloadPauseTimeoutGeneration,
1963                                       mAudioOffloadPauseTimeoutGeneration);
1964     ++mAudioOffloadPauseTimeoutGeneration;
1965 }
1966 
1967 status_t NuPlayer::Renderer::onOpenAudioSink(
1968         const sp<AMessage> &format,
1969         bool offloadOnly,
1970         bool hasVideo,
1971         uint32_t flags,
1972         bool isStreaming) {
1973     ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
1974             offloadOnly, offloadingAudio());
1975     bool audioSinkChanged = false;
1976 
1977     int32_t numChannels;
1978     CHECK(format->findInt32("channel-count", &numChannels));
1979 
1980     // channel mask info as read from the audio format
1981     int32_t mediaFormatChannelMask;
1982     // channel mask to use for native playback
1983     audio_channel_mask_t channelMask;
1984     if (format->findInt32("channel-mask", &mediaFormatChannelMask)) {
1985         // KEY_CHANNEL_MASK follows the android.media.AudioFormat java mask
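        // Illustrative example: the Java-side stereo mask CHANNEL_OUT_STEREO (0xC) maps to
        // the native AUDIO_CHANNEL_OUT_STEREO (0x3) here.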
1986         channelMask = audio_channel_mask_from_media_format_mask(mediaFormatChannelMask);
1987     } else {
1988         // no mask found: the mask will be derived from the channel count
1989         channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
1990     }
1991 
1992     int32_t sampleRate;
1993     CHECK(format->findInt32("sample-rate", &sampleRate));
1994 
1995     // read pcm encoding from MediaCodec output format, if available
1996     int32_t pcmEncoding;
1997     audio_format_t audioFormat =
1998             format->findInt32(KEY_PCM_ENCODING, &pcmEncoding) ?
1999                     audioFormatFromEncoding(pcmEncoding) : AUDIO_FORMAT_PCM_16_BIT;
2000 
2001     if (offloadingAudio()) {
2002         AString mime;
2003         CHECK(format->findString("mime", &mime));
2004         status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
2005 
2006         if (err != OK) {
2007             ALOGE("Couldn't map mime \"%s\" to a valid "
2008                     "audio_format", mime.c_str());
2009             onDisableOffloadAudio();
2010         } else {
2011             ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
2012                     mime.c_str(), audioFormat);
2013 
2014             int avgBitRate = 0;
2015             format->findInt32("bitrate", &avgBitRate);
2016 
2017             int32_t aacProfile = -1;
2018             if (audioFormat == AUDIO_FORMAT_AAC
2019                     && format->findInt32("aac-profile", &aacProfile)) {
2020                 // Redefine AAC format as per aac profile
2021                 mapAACProfileToAudioFormat(
2022                         audioFormat,
2023                         aacProfile);
2024             }
2025 
2026             audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
2027             offloadInfo.duration_us = -1;
2028             format->findInt64(
2029                     "durationUs", &offloadInfo.duration_us);
2030             offloadInfo.sample_rate = sampleRate;
2031             offloadInfo.channel_mask = channelMask;
2032             offloadInfo.format = audioFormat;
2033             offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
2034             offloadInfo.bit_rate = avgBitRate;
2035             offloadInfo.has_video = hasVideo;
2036             offloadInfo.is_streaming = isStreaming;
2037 
2038             if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
2039                 ALOGV("openAudioSink: no change in offload mode");
2040                 // no change from previous configuration, everything ok.
2041                 return OK;
2042             }
2043             mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
2044 
2045             ALOGV("openAudioSink: try to open AudioSink in offload mode");
2046             uint32_t offloadFlags = flags;
2047             offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
2048             offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
2049             audioSinkChanged = true;
2050             mAudioSink->close();
2051 
2052             err = mAudioSink->open(
2053                     sampleRate,
2054                     numChannels,
2055                     (audio_channel_mask_t)channelMask,
2056                     audioFormat,
2057                     0 /* bufferCount - unused */,
2058                     &NuPlayer::Renderer::AudioSinkCallback,
2059                     this,
2060                     (audio_output_flags_t)offloadFlags,
2061                     &offloadInfo);
2062 
2063             if (err == OK) {
2064                 err = mAudioSink->setPlaybackRate(mPlaybackSettings);
2065             }
2066 
2067             if (err == OK) {
2068                 // If the playback is offloaded to h/w, we pass
2069                 // the HAL some metadata information.
2070                 // We don't want to do this for PCM because it
2071                 // will be going through the AudioFlinger mixer
2072                 // before reaching the hardware.
2073                 // TODO
2074                 mCurrentOffloadInfo = offloadInfo;
2075                 if (!mPaused) { // for preview mode, don't start if paused
2076                     err = mAudioSink->start();
2077                 }
2078                 ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
2079             }
2080             if (err != OK) {
2081                 // Clean up, fall back to non offload mode.
2082                 mAudioSink->close();
2083                 onDisableOffloadAudio();
2084                 mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
2085                 ALOGV("openAudioSink: offload failed");
2086                 if (offloadOnly) {
2087                     notifyAudioTearDown(kForceNonOffload);
2088                 }
2089             } else {
2090                 mUseAudioCallback = true;  // offload mode transfers data through callback
2091                 ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
2092             }
2093         }
2094     }
2095     if (!offloadOnly && !offloadingAudio()) {
2096         ALOGV("openAudioSink: open AudioSink in NON-offload mode");
2097         uint32_t pcmFlags = flags;
2098         pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
2099 
2100         const PcmInfo info = {
2101                 (audio_channel_mask_t)channelMask,
2102                 (audio_output_flags_t)pcmFlags,
2103                 audioFormat,
2104                 numChannels,
2105                 sampleRate
2106         };
2107         if (memcmp(&mCurrentPcmInfo, &info, sizeof(info)) == 0) {
2108             ALOGV("openAudioSink: no change in pcm mode");
2109             // no change from previous configuration, everything ok.
2110             return OK;
2111         }
2112 
2113         audioSinkChanged = true;
2114         mAudioSink->close();
2115         mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
2116         // Note: It is possible to set up the callback, but not use it to send audio data.
2117         // This requires a fix in AudioSink to explicitly specify the transfer mode.
2118         mUseAudioCallback = getUseAudioCallbackSetting();
2119         if (mUseAudioCallback) {
2120             ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
2121         }
2122 
2123         // Compute the desired buffer size.
2124         // For callback mode, the amount of time before wakeup is about half the buffer size.
2125         const uint32_t frameCount =
2126                 (unsigned long long)sampleRate * getAudioSinkPcmMsSetting() / 1000;
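        // For example, at 48 kHz with the default 500 ms sink setting this works out to
        // 48000 * 500 / 1000 = 24000 frames.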
2127 
2128         // doNotReconnect means the AudioSink will signal back and let NuPlayer re-construct the
2129         // AudioSink. We don't want this when there's video because it would cause a video seek to
2130         // the previous I-frame. But we do want it when there's only audio because it gives
2131         // NuPlayer a chance to switch from non-offload mode to offload mode.
2132         // So we only set doNotReconnect when there's no video.
2133         const bool doNotReconnect = !hasVideo;
2134 
2135         // We should always be able to set our playback settings if the sink is closed.
2136         LOG_ALWAYS_FATAL_IF(mAudioSink->setPlaybackRate(mPlaybackSettings) != OK,
2137                 "onOpenAudioSink: can't set playback rate on closed sink");
2138         status_t err = mAudioSink->open(
2139                     sampleRate,
2140                     numChannels,
2141                     (audio_channel_mask_t)channelMask,
2142                     audioFormat,
2143                     0 /* bufferCount - unused */,
2144                     mUseAudioCallback ? &NuPlayer::Renderer::AudioSinkCallback : NULL,
2145                     mUseAudioCallback ? this : NULL,
2146                     (audio_output_flags_t)pcmFlags,
2147                     NULL,
2148                     doNotReconnect,
2149                     frameCount);
2150         if (err != OK) {
2151             ALOGW("openAudioSink: non offloaded open failed status: %d", err);
2152             mAudioSink->close();
2153             mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
2154             return err;
2155         }
2156         mCurrentPcmInfo = info;
2157         if (!mPaused) { // for preview mode, don't start if paused
2158             mAudioSink->start();
2159         }
2160     }
2161     if (audioSinkChanged) {
2162         onAudioSinkChanged();
2163     }
2164     mAudioTornDown = false;
2165     return OK;
2166 }
2167 
2168 void NuPlayer::Renderer::onCloseAudioSink() {
2169     mAudioSink->close();
2170     mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
2171     mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
2172 }
2173 
2174 void NuPlayer::Renderer::onChangeAudioFormat(
2175         const sp<AMessage> &meta, const sp<AMessage> &notify) {
2176     sp<AMessage> format;
2177     CHECK(meta->findMessage("format", &format));
2178 
2179     int32_t offloadOnly;
2180     CHECK(meta->findInt32("offload-only", &offloadOnly));
2181 
2182     int32_t hasVideo;
2183     CHECK(meta->findInt32("has-video", &hasVideo));
2184 
2185     uint32_t flags;
2186     CHECK(meta->findInt32("flags", (int32_t *)&flags));
2187 
2188     uint32_t isStreaming;
2189     CHECK(meta->findInt32("isStreaming", (int32_t *)&isStreaming));
2190 
2191     status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags, isStreaming);
2192 
2193     if (err != OK) {
2194         notify->setInt32("err", err);
2195     }
2196     notify->post();
2197 }
2198 
2199 void NuPlayer::Renderer::WakeLockEvent::dump(AString& logString) {
2200   logString.append("[");
2201   logString.append(mTimeMs);
2202   logString.append(",");
2203   logString.append(mEventTimeoutGeneration);
2204   logString.append(",");
2205   logString.append(mRendererTimeoutGeneration);
2206   logString.append("]");
2207 }
2208 
2209 }  // namespace android
2210