/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "MediaSync"
#include <inttypes.h>

#include <com_android_graphics_libgui_flags.h>

#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
#include <gui/BufferItemConsumer.h>
#include <gui/Surface.h>
#else
#include <gui/IGraphicBufferConsumer.h>
#endif
#include <gui/BufferQueue.h>
#include <gui/IGraphicBufferProducer.h>

#include <media/AudioTrack.h>
#include <media/stagefright/MediaClock.h>
#include <media/stagefright/MediaSync.h>
#include <media/stagefright/VideoFrameScheduler.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>

#include <ui/GraphicBuffer.h>

#include <system/window.h>

// Maximum lateness allowed for a video frame to be rendered. If a video frame
// arrives later than this threshold, it is discarded without being rendered.
static const int64_t kMaxAllowedVideoLateTimeUs = 40000LL;

namespace android {

// static
sp<MediaSync> MediaSync::create() {
    sp<MediaSync> sync = new MediaSync();
    sync->mLooper->registerHandler(sync);
    return sync;
}

MediaSync::MediaSync()
      : mIsAbandoned(false),
        mMutex(),
        mReleaseCondition(),
        mNumOutstandingBuffers(0),
        mUsageFlagsFromOutput(0),
        mMaxAcquiredBufferCount(1),
        mReturnPendingInputFrame(false),
        mNativeSampleRateInHz(0),
        mNumFramesWritten(0),
        mHasAudio(false),
        mNextBufferItemMediaUs(-1),
        mPlaybackRate(0.0) {
    mMediaClock = new MediaClock;
    mMediaClock->init();

    // initialize settings
    mPlaybackSettings = AUDIO_PLAYBACK_RATE_DEFAULT;
    mPlaybackSettings.mSpeed = mPlaybackRate;

    mLooper = new ALooper;
    mLooper->setName("MediaSync");
    mLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
}

MediaSync::~MediaSync() {
    if (mInput != NULL) {
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
        mInput->abandon();
#else
        mInput->consumerDisconnect();
#endif
    }
    if (mOutput != NULL) {
        mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
    }

    if (mLooper != NULL) {
        mLooper->unregisterHandler(id());
        mLooper->stop();
    }
}

status_t MediaSync::setSurface(const sp<IGraphicBufferProducer> &output) {
    Mutex::Autolock lock(mMutex);

    if (output == mOutput) {
        return NO_ERROR;  // same output surface.
    }

    if (output == NULL && mSyncSettings.mSource == AVSYNC_SOURCE_VSYNC) {
        ALOGE("setSurface: output surface is used as sync source and cannot be removed.");
        return INVALID_OPERATION;
    }

    if (output != NULL) {
        int newUsage = 0;
        output->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &newUsage);

        // Check usage flags only when current output surface has been used to create input surface.
        if (mOutput != NULL && mInput != NULL) {
            int ignoredFlags = (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER
                    | GRALLOC_USAGE_EXTERNAL_DISP);
            // New output surface is not allowed to add new usage flag except ignored ones.
            if ((newUsage & ~(mUsageFlagsFromOutput | ignoredFlags)) != 0) {
                ALOGE("setSurface: new output surface has new usage flag not used by current one.");
                return BAD_VALUE;
            }
        }

        // Try to connect to new output surface. If failed, current output surface will not
        // be changed.
        IGraphicBufferProducer::QueueBufferOutput queueBufferOutput;
        sp<OutputListener> listener(new OutputListener(this, output));
        IInterface::asBinder(output)->linkToDeath(listener);
        status_t status =
            output->connect(listener,
                            NATIVE_WINDOW_API_MEDIA,
                            true /* producerControlledByApp */,
                            &queueBufferOutput);
        if (status != NO_ERROR) {
            ALOGE("setSurface: failed to connect (%d)", status);
            return status;
        }

        if (mFrameScheduler == NULL) {
            mFrameScheduler = new VideoFrameScheduler();
            mFrameScheduler->init();
        }
    }

    if (mOutput != NULL) {
        mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
        while (!mBuffersSentToOutput.isEmpty()) {
            returnBufferToInput_l(mBuffersSentToOutput.valueAt(0), Fence::NO_FENCE);
            mBuffersSentToOutput.removeItemsAt(0);
        }
    }

    mOutput = output;

    return NO_ERROR;
}

// |audioTrack| is used only for querying information.
status_t MediaSync::setAudioTrack(const sp<AudioTrack> &audioTrack) {
    Mutex::Autolock lock(mMutex);

    // TODO: support audio track change.
    if (mAudioTrack != NULL) {
        ALOGE("setAudioTrack: audioTrack has already been configured.");
        return INVALID_OPERATION;
    }

    if (audioTrack == NULL && mSyncSettings.mSource == AVSYNC_SOURCE_AUDIO) {
        ALOGE("setAudioTrack: audioTrack is used as sync source and cannot be removed.");
        return INVALID_OPERATION;
    }

    if (audioTrack != NULL) {
        // check if audio track supports the playback settings
        if (mPlaybackSettings.mSpeed != 0.f
                && audioTrack->setPlaybackRate(mPlaybackSettings) != OK) {
            ALOGE("playback settings are not supported by the audio track");
            return INVALID_OPERATION;
        }
        uint32_t nativeSampleRateInHz = audioTrack->getOriginalSampleRate();
        if (nativeSampleRateInHz <= 0) {
            ALOGE("setAudioTrack: native sample rate should be positive.");
            return BAD_VALUE;
        }
        mAudioTrack = audioTrack;
        mNativeSampleRateInHz = nativeSampleRateInHz;
        (void)setPlaybackSettings_l(mPlaybackSettings);
    } else {
        mAudioTrack = NULL;
        mNativeSampleRateInHz = 0;
    }

    // potentially resync to new source
    resync_l();
    return OK;
}

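// Creates the input surface that the decoder queues video frames into. This
// requires that an output surface has already been set and that no input surface
// exists yet; the consumer usage bits and the minimum undequeued buffer count
// queried from the output are propagated to the newly created input.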
status_t MediaSync::createInputSurface(
        sp<IGraphicBufferProducer> *outBufferProducer) {
    if (outBufferProducer == NULL) {
        return BAD_VALUE;
    }

    Mutex::Autolock lock(mMutex);

    if (mOutput == NULL) {
        return NO_INIT;
    }

    if (mInput != NULL) {
        return INVALID_OPERATION;
    }

#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
    int usageFlags = 0;
    mOutput->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &usageFlags);

    auto [newInput, surface] = BufferItemConsumer::create(usageFlags);

    sp<InputListener> listener(new InputListener(this));
    newInput->setFrameAvailableListener(listener);
    newInput->setName(String8("MediaSync"));
    // propagate usage bits from output surface
    status_t status = newInput->setConsumerUsageBits(usageFlags);
    if (status != OK) {
        ALOGE("%s: Unable to set usage bits to %d", __FUNCTION__, usageFlags);
        return status;
    }

    // set undequeued buffer count
    int minUndequeuedBuffers;
    mOutput->query(NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffers);
    status = newInput->setMaxAcquiredBufferCount(minUndequeuedBuffers);
    if (status != OK) {
        ALOGE("%s: Unable to set max acquired buffer count to %d", __FUNCTION__,
              minUndequeuedBuffers);
        return status;
    }

    mMaxAcquiredBufferCount = minUndequeuedBuffers;
    mUsageFlagsFromOutput = usageFlags;
    mInput = newInput;
    mListener = listener;
    *outBufferProducer = surface->getIGraphicBufferProducer();
    return OK;
#else
    sp<IGraphicBufferProducer> bufferProducer;
    sp<IGraphicBufferConsumer> bufferConsumer;
    BufferQueue::createBufferQueue(&bufferProducer, &bufferConsumer);

    sp<InputListener> listener(new InputListener(this));
    status_t status =
        bufferConsumer->consumerConnect(listener, false /* controlledByApp */);
    if (status == NO_ERROR) {
        bufferConsumer->setConsumerName(String8("MediaSync"));
        // propagate usage bits from output surface
        mUsageFlagsFromOutput = 0;
        mOutput->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &mUsageFlagsFromOutput);
        bufferConsumer->setConsumerUsageBits(mUsageFlagsFromOutput);
        *outBufferProducer = bufferProducer;
        mInput = bufferConsumer;

        // set undequeued buffer count
        int minUndequeuedBuffers;
        mOutput->query(NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffers);
        mMaxAcquiredBufferCount = minUndequeuedBuffers;
        bufferConsumer->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);
    }
    return status;
#endif
}

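// Re-evaluates the effective sync source: AVSYNC_SOURCE_DEFAULT resolves to the
// audio track when one is set, otherwise to the system clock. The current playback
// rate is then re-applied so pending video is rescheduled against that source.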
void MediaSync::resync_l() {
    AVSyncSource src = mSyncSettings.mSource;
    if (src == AVSYNC_SOURCE_DEFAULT) {
        if (mAudioTrack != NULL) {
            src = AVSYNC_SOURCE_AUDIO;
        } else {
            src = AVSYNC_SOURCE_SYSTEM_CLOCK;
        }
    }

    // TODO: resync ourselves to the current clock (e.g. on sync source change)
    updatePlaybackRate_l(mPlaybackRate);
}

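// Applies a new playback rate to the media clock and re-runs the video drain loop.
// Speeding up invalidates the pending drain deadline (mNextBufferItemMediaUs) so
// the next frame is rescheduled against the faster clock.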
void MediaSync::updatePlaybackRate_l(float rate) {
    if (rate > mPlaybackRate) {
        mNextBufferItemMediaUs = -1;
    }
    mPlaybackRate = rate;
    // TODO: update frame scheduler with this info
    mMediaClock->setPlaybackRate(rate);
    onDrainVideo_l();
}

sp<const MediaClock> MediaSync::getMediaClock() {
    return mMediaClock;
}

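// Estimates the real time needed to play out the audio frames that have been
// written but not yet played. Returns INT64_MAX when playback is paused (rate 0)
// or when the computed duration would overflow.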
status_t MediaSync::getPlayTimeForPendingAudioFrames(int64_t *outTimeUs) {
    Mutex::Autolock lock(mMutex);
    // The caller should check the playback rate if it does not want to receive
    // a huge value for the play time.
    if (mPlaybackRate == 0.0f) {
        *outTimeUs = INT64_MAX;
        return OK;
    }

    uint32_t numFramesPlayed = 0;
    if (mAudioTrack != NULL) {
        status_t res = mAudioTrack->getPosition(&numFramesPlayed);
        if (res != OK) {
            return res;
        }
    }

    int64_t numPendingFrames = mNumFramesWritten - numFramesPlayed;
    if (numPendingFrames < 0) {
        numPendingFrames = 0;
        ALOGW("getPlayTimeForPendingAudioFrames: pending frame count is negative.");
    }
    double timeUs = numPendingFrames * 1000000.0
            / (mNativeSampleRateInHz * (double)mPlaybackRate);
    if (timeUs > (double)INT64_MAX) {
        // Overflow.
        *outTimeUs = INT64_MAX;
        ALOGW("getPlayTimeForPendingAudioFrames: play time for pending audio frames "
              "is too high, possibly due to super low playback rate(%f)", mPlaybackRate);
    } else {
        *outTimeUs = (int64_t)timeUs;
    }

    return OK;
}

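// Called after the client writes audio data to the track. Updates the media clock
// anchor based on how much audio has actually been played out and, if that change
// moved the next pending video frame earlier in real time, re-runs the drain loop.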
status_t MediaSync::updateQueuedAudioData(
        size_t sizeInBytes, int64_t presentationTimeUs) {
    if (sizeInBytes == 0) {
        return OK;
    }

    Mutex::Autolock lock(mMutex);

    if (mAudioTrack == NULL) {
        ALOGW("updateQueuedAudioData: audioTrack has NOT been configured.");
        return INVALID_OPERATION;
    }

    int64_t numFrames = sizeInBytes / mAudioTrack->frameSize();
    int64_t maxMediaTimeUs = presentationTimeUs
            + getDurationIfPlayedAtNativeSampleRate_l(numFrames);

    int64_t nowUs = ALooper::GetNowUs();
    int64_t nowMediaUs = presentationTimeUs
            - getDurationIfPlayedAtNativeSampleRate_l(mNumFramesWritten)
            + getPlayedOutAudioDurationMedia_l(nowUs);

    mNumFramesWritten += numFrames;

    int64_t oldRealTime = -1;
    if (mNextBufferItemMediaUs != -1) {
        oldRealTime = getRealTime(mNextBufferItemMediaUs, nowUs);
    }

    mMediaClock->updateAnchor(nowMediaUs, nowUs, maxMediaTimeUs);
    mHasAudio = true;

    if (oldRealTime != -1) {
        int64_t newRealTime = getRealTime(mNextBufferItemMediaUs, nowUs);
        if (newRealTime >= oldRealTime) {
            return OK;
        }
    }

    mNextBufferItemMediaUs = -1;
    onDrainVideo_l();
    return OK;
}

void MediaSync::setName(const AString &name) {
    Mutex::Autolock lock(mMutex);
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
    if (mInput) {
        mInput->setName(String8(name.c_str()));
    } else {
        ALOGE("%s with name %s called without an mInput set", __FUNCTION__, name.c_str());
    }
#else
    mInput->setConsumerName(String8(name.c_str()));
#endif
}

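// Drops all pending video: queued buffer items are returned to the input, the
// frame scheduler and media clock anchor are reset, and any thread blocked in
// onFrameAvailableFromInput is woken so it can return its pending frame.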
void MediaSync::flush() {
    Mutex::Autolock lock(mMutex);
    if (mFrameScheduler != NULL) {
        mFrameScheduler->restart();
    }
    while (!mBufferItems.empty()) {
        BufferItem *bufferItem = &*mBufferItems.begin();
        returnBufferToInput_l(bufferItem->mGraphicBuffer, bufferItem->mFence);
        mBufferItems.erase(mBufferItems.begin());
    }
    mNextBufferItemMediaUs = -1;
    mNumFramesWritten = 0;
    mReturnPendingInputFrame = true;
    mReleaseCondition.signal();
    mMediaClock->clearAnchor();
}

status_t MediaSync::setVideoFrameRateHint(float rate) {
    Mutex::Autolock lock(mMutex);
    if (rate < 0.f) {
        return BAD_VALUE;
    }
    if (mFrameScheduler != NULL) {
        mFrameScheduler->init(rate);
    }
    return OK;
}

float MediaSync::getVideoFrameRate() {
    Mutex::Autolock lock(mMutex);
    if (mFrameScheduler != NULL) {
        float fps = mFrameScheduler->getFrameRate();
        if (fps > 0.f) {
            return fps;
        }
    }

    // we don't have or know the frame rate
    return -1.f;
}

status_t MediaSync::setSyncSettings(const AVSyncSettings &syncSettings) {
    // validate settings
    if (syncSettings.mSource >= AVSYNC_SOURCE_MAX
            || syncSettings.mAudioAdjustMode >= AVSYNC_AUDIO_ADJUST_MODE_MAX
            || syncSettings.mTolerance < 0.f
            || syncSettings.mTolerance >= AVSYNC_TOLERANCE_MAX) {
        return BAD_VALUE;
    }

    Mutex::Autolock lock(mMutex);

    // verify that we have the sync source
    switch (syncSettings.mSource) {
        case AVSYNC_SOURCE_AUDIO:
            if (mAudioTrack == NULL) {
                ALOGE("setSyncSettings: audio sync source requires an audio track");
                return BAD_VALUE;
            }
            break;
        case AVSYNC_SOURCE_VSYNC:
            if (mOutput == NULL) {
                ALOGE("setSyncSettings: vsync sync source requires an output surface");
                return BAD_VALUE;
            }
            break;
        default:
            break;
    }

    mSyncSettings = syncSettings;
    resync_l();
    return OK;
}

void MediaSync::getSyncSettings(AVSyncSettings *syncSettings) {
    Mutex::Autolock lock(mMutex);
    *syncSettings = mSyncSettings;
}

status_t MediaSync::setPlaybackSettings(const AudioPlaybackRate &rate) {
    Mutex::Autolock lock(mMutex);

    status_t err = setPlaybackSettings_l(rate);
    if (err == OK) {
        // TODO: adjust rate if using VSYNC as source
        updatePlaybackRate_l(rate.mSpeed);
    }
    return err;
}

status_t MediaSync::setPlaybackSettings_l(const AudioPlaybackRate &rate) {
    if (rate.mSpeed < 0.f || rate.mPitch < 0.f) {
        // We don't validate other audio settings.
        // They will be validated when/if audiotrack is set.
        return BAD_VALUE;
    }

    if (mAudioTrack != NULL) {
        if (rate.mSpeed == 0.f) {
            mAudioTrack->pause();
        } else {
            status_t err = mAudioTrack->setPlaybackRate(rate);
            if (err != OK) {
                return BAD_VALUE;
            }

            // ignore errors
            (void)mAudioTrack->start();
        }
    }
    mPlaybackSettings = rate;
    return OK;
}

void MediaSync::getPlaybackSettings(AudioPlaybackRate *rate) {
    Mutex::Autolock lock(mMutex);
    *rate = mPlaybackSettings;
}

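// Maps a media timestamp to a real (system) time via the media clock; falls back
// to "render now" when the clock has no usable anchor yet (e.g. audio not ready).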
int64_t MediaSync::getRealTime(int64_t mediaTimeUs, int64_t nowUs) {
    int64_t realUs;
    if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) {
        // If we failed to get the current position, e.g. because the audio
        // clock is not ready, just play out the video immediately without delay.
        return nowUs;
    }
    return realUs;
}

int64_t MediaSync::getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames) {
    return (numFrames * 1000000LL / mNativeSampleRateInHz);
}

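// Returns the media duration of audio that has actually been played out, derived
// from the track's timestamp (or position) plus the time elapsed since that report.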
int64_t MediaSync::getPlayedOutAudioDurationMedia_l(int64_t nowUs) {
    CHECK(mAudioTrack != NULL);

    uint32_t numFramesPlayed;
    int64_t numFramesPlayedAtUs;
    AudioTimestamp ts;

    status_t res = mAudioTrack->getTimestamp(ts);
    if (res == OK) {
        // case 1: mixing audio tracks.
        numFramesPlayed = ts.mPosition;
        numFramesPlayedAtUs = ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
        //ALOGD("getTimestamp: OK %d %lld",
        //      numFramesPlayed, (long long)numFramesPlayedAtUs);
    } else if (res == WOULD_BLOCK) {
        // case 2: transitory state on start of a new track
        numFramesPlayed = 0;
        numFramesPlayedAtUs = nowUs;
        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
        //      numFramesPlayed, (long long)numFramesPlayedAtUs);
    } else {
        // case 3: transitory at new track or audio fast tracks.
        res = mAudioTrack->getPosition(&numFramesPlayed);
        CHECK_EQ(res, (status_t)OK);
        numFramesPlayedAtUs = nowUs;
        numFramesPlayedAtUs += 1000LL * mAudioTrack->latency() / 2; /* XXX */
        //ALOGD("getPosition: %d %lld", numFramesPlayed, (long long)numFramesPlayedAtUs);
    }

    // can't be negative until 12.4 hrs, test.
    //CHECK_EQ(numFramesPlayed & (1 << 31), 0);
    int64_t durationUs =
            getDurationIfPlayedAtNativeSampleRate_l(numFramesPlayed)
            + nowUs - numFramesPlayedAtUs;
    if (durationUs < 0) {
        // Occurs when numFramesPlayed position is very small and the following:
        // (1) In case 1, the time nowUs is computed before getTimestamp() is
        //     called and numFramesPlayedAtUs is greater than nowUs by time more
        //     than numFramesPlayed.
        // (2) In case 3, using getPosition and adding mAudioTrack->latency()
        //     to numFramesPlayedAtUs, by a time amount greater than
        //     numFramesPlayed.
        //
        // Both of these are transitory conditions.
        ALOGV("getPlayedOutAudioDurationMedia_l: negative duration %lld "
              "set to zero", (long long)durationUs);
        durationUs = 0;
    }
    ALOGV("getPlayedOutAudioDurationMedia_l(%lld) nowUs(%lld) frames(%u) "
          "framesAt(%lld)",
          (long long)durationUs, (long long)nowUs, numFramesPlayed,
          (long long)numFramesPlayedAtUs);
    return durationUs;
}

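// Drains the queue of pending video frames. A frame whose vsync-adjusted render
// time falls within the next two vsyncs is queued to the output (or dropped if it
// is already too late while audio drives the clock); otherwise a kWhatDrainVideo
// message is posted to retry when the earliest frame comes due.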
void MediaSync::onDrainVideo_l() {
    if (!isPlaying()) {
        return;
    }

    while (!mBufferItems.empty()) {
        int64_t nowUs = ALooper::GetNowUs();
        BufferItem *bufferItem = &*mBufferItems.begin();
        int64_t itemMediaUs = bufferItem->mTimestamp / 1000;
        int64_t itemRealUs = getRealTime(itemMediaUs, nowUs);

        // adjust video frame PTS based on vsync
        itemRealUs = mFrameScheduler->schedule(itemRealUs * 1000) / 1000;
        int64_t twoVsyncsUs = 2 * (mFrameScheduler->getVsyncPeriod() / 1000);

        // post 2 display refreshes before rendering is due
        if (itemRealUs <= nowUs + twoVsyncsUs) {
            ALOGV("adjusting PTS from %lld to %lld",
                    (long long)bufferItem->mTimestamp / 1000, (long long)itemRealUs);
            bufferItem->mTimestamp = itemRealUs * 1000;
            bufferItem->mIsAutoTimestamp = false;

            if (mHasAudio) {
                if (nowUs - itemRealUs <= kMaxAllowedVideoLateTimeUs) {
                    renderOneBufferItem_l(*bufferItem);
                } else {
                    // too late.
                    returnBufferToInput_l(
                            bufferItem->mGraphicBuffer, bufferItem->mFence);
                    mFrameScheduler->restart();
                }
            } else {
                // always render video buffer in video-only mode.
                renderOneBufferItem_l(*bufferItem);

                // smooth out videos >= 10fps
                mMediaClock->updateAnchor(
                        itemMediaUs, nowUs, itemMediaUs + 100000);
            }

            mBufferItems.erase(mBufferItems.begin());
            mNextBufferItemMediaUs = -1;
        } else {
            if (mNextBufferItemMediaUs == -1
                    || mNextBufferItemMediaUs > itemMediaUs) {
                sp<AMessage> msg = new AMessage(kWhatDrainVideo, this);
                msg->post(itemRealUs - nowUs - twoVsyncsUs);
                mNextBufferItemMediaUs = itemMediaUs;
            }
            break;
        }
    }
}

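// Consumer callback for the input surface. Waits while too many buffers are
// outstanding, then acquires and detaches the new frame from the input and either
// returns it immediately (after a flush) or appends it to mBufferItems and kicks
// the drain loop.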
void MediaSync::onFrameAvailableFromInput() {
    Mutex::Autolock lock(mMutex);

    const static nsecs_t kAcquireWaitTimeout = 2000000000; // 2 seconds

    mReturnPendingInputFrame = false;

    // If there are too many outstanding buffers, wait until a buffer is
    // released back to the input in onBufferReleased.
    // NOTE: BufferQueue allows dequeuing maxAcquiredBufferCount + 1 buffers
    while (mNumOutstandingBuffers > mMaxAcquiredBufferCount
            && !mIsAbandoned && !mReturnPendingInputFrame) {
        if (mReleaseCondition.waitRelative(mMutex, kAcquireWaitTimeout) != OK) {
            ALOGI_IF(mPlaybackRate != 0.f, "still waiting to release a buffer before acquire");
        }

        // If the sync is abandoned while we are waiting, the release
        // condition variable will be broadcast, and we should just return
        // without attempting to do anything more (since the input queue will
        // also be abandoned).
        if (mIsAbandoned) {
            return;
        }
    }

    // Acquire and detach the buffer from the input.
    BufferItem bufferItem;
    status_t status = mInput->acquireBuffer(&bufferItem, 0 /* presentWhen */);
    if (status != NO_ERROR) {
        ALOGE("acquiring buffer from input failed (%d)", status);
        return;
    }
    ++mNumOutstandingBuffers;

    ALOGV("acquired buffer %#llx from input", (long long)bufferItem.mGraphicBuffer->getId());

#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
    status = mInput->detachBuffer(bufferItem.mGraphicBuffer);
#else
    status = mInput->detachBuffer(bufferItem.mSlot);
#endif
    if (status != NO_ERROR) {
        ALOGE("detaching buffer from input failed (%d)", status);
        if (status == NO_INIT) {
            // If the input has been abandoned, move on.
            onAbandoned_l(true /* isInput */);
        }
        return;
    }

    if (mBuffersFromInput.indexOfKey(bufferItem.mGraphicBuffer->getId()) >= 0) {
        // Something is wrong since this buffer should be at our hands, bail.
        ALOGE("received buffer multiple times from input");
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
        mInput->abandon();
#else
        mInput->consumerDisconnect();
#endif
        onAbandoned_l(true /* isInput */);
        return;
    }
    mBuffersFromInput.add(bufferItem.mGraphicBuffer->getId(), bufferItem.mGraphicBuffer);

    // If flush happened while waiting for a buffer to be released, simply return it
    // TRICKY: do it here after it is detached so that we don't have to cache mGraphicBuffer.
    if (mReturnPendingInputFrame) {
        mReturnPendingInputFrame = false;
        returnBufferToInput_l(bufferItem.mGraphicBuffer, bufferItem.mFence);
        return;
    }

    mBufferItems.push_back(bufferItem);

    if (mBufferItems.size() == 1) {
        onDrainVideo_l();
    }
}

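// Attaches one detached buffer to the output surface and queues it for display,
// preserving the item's timestamp, crop, transform and fence. On failure the
// buffer is returned to the input; NO_INIT means the output has been abandoned.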
void MediaSync::renderOneBufferItem_l(const BufferItem &bufferItem) {
    IGraphicBufferProducer::QueueBufferInput queueInput(
            bufferItem.mTimestamp,
            bufferItem.mIsAutoTimestamp,
            bufferItem.mDataSpace,
            bufferItem.mCrop,
            static_cast<int32_t>(bufferItem.mScalingMode),
            bufferItem.mTransform,
            bufferItem.mFence);

    // Attach and queue the buffer to the output.
    int slot;
    mOutput->setGenerationNumber(bufferItem.mGraphicBuffer->getGenerationNumber());
    status_t status = mOutput->attachBuffer(&slot, bufferItem.mGraphicBuffer);
    ALOGE_IF(status != NO_ERROR, "attaching buffer to output failed (%d)", status);
    if (status == NO_ERROR) {
        IGraphicBufferProducer::QueueBufferOutput queueOutput;
        status = mOutput->queueBuffer(slot, queueInput, &queueOutput);
        ALOGE_IF(status != NO_ERROR, "queueing buffer to output failed (%d)", status);
    }

    if (status != NO_ERROR) {
        returnBufferToInput_l(bufferItem.mGraphicBuffer, bufferItem.mFence);
        if (status == NO_INIT) {
            // If the output has been abandoned, move on.
            onAbandoned_l(false /* isInput */);
        }
        return;
    }

    if (mBuffersSentToOutput.indexOfKey(bufferItem.mGraphicBuffer->getId()) >= 0) {
        // Something is wrong since this buffer should be held by output now, bail.
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
        mInput->abandon();
#else
        mInput->consumerDisconnect();
#endif
        onAbandoned_l(true /* isInput */);
        return;
    }
    mBuffersSentToOutput.add(bufferItem.mGraphicBuffer->getId(), bufferItem.mGraphicBuffer);

    ALOGV("queued buffer %#llx to output", (long long)bufferItem.mGraphicBuffer->getId());
}

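// Producer-side callback from the output surface: detaches the buffer the output
// just released and hands it back to the input, unless the sync was abandoned.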
void MediaSync::onBufferReleasedByOutput(sp<IGraphicBufferProducer> &output) {
    Mutex::Autolock lock(mMutex);

    if (output != mOutput) {
        return;  // This is not the current output, ignore.
    }

    sp<GraphicBuffer> buffer;
    sp<Fence> fence;
    status_t status = mOutput->detachNextBuffer(&buffer, &fence);
    ALOGE_IF(status != NO_ERROR, "detaching buffer from output failed (%d)", status);

    if (status == NO_INIT) {
        // If the output has been abandoned, we can't do anything else,
        // since buffer is invalid.
        onAbandoned_l(false /* isInput */);
        return;
    }

    ALOGV("detached buffer %#llx from output", (long long)buffer->getId());

    // If we've been abandoned, we can't return the buffer to the input, so just
    // move on.
    if (mIsAbandoned) {
        return;
    }

    ssize_t ix = mBuffersSentToOutput.indexOfKey(buffer->getId());
    if (ix < 0) {
        // The buffer is unknown, maybe leftover, ignore.
        return;
    }
    mBuffersSentToOutput.removeItemsAt(ix);

    returnBufferToInput_l(buffer, fence);
}

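// Attaches a buffer back to the input consumer and releases it, then signals any
// thread waiting in onFrameAvailableFromInput that a buffer slot has freed up.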
void MediaSync::returnBufferToInput_l(
        const sp<GraphicBuffer> &buffer, const sp<Fence> &fence) {
    ssize_t ix = mBuffersFromInput.indexOfKey(buffer->getId());
    if (ix < 0) {
        // The buffer is unknown, something is wrong, bail.
        ALOGE("output returned unknown buffer");
        mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
        onAbandoned_l(false /* isInput */);
        return;
    }
    sp<GraphicBuffer> oldBuffer = mBuffersFromInput.valueAt(ix);
    mBuffersFromInput.removeItemsAt(ix);

    // Attach and release the buffer back to the input.
    int consumerSlot;
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
    status_t status = mInput->attachBuffer(oldBuffer);
#else
    status_t status = mInput->attachBuffer(&consumerSlot, oldBuffer);
#endif
    ALOGE_IF(status != NO_ERROR, "attaching buffer to input failed (%d)", status);
    if (status == NO_ERROR) {
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
        mInput->releaseBuffer(oldBuffer, fence);
#else
        status = mInput->releaseBuffer(consumerSlot, 0 /* frameNumber */, fence);
#endif
        ALOGE_IF(status != NO_ERROR, "releasing buffer to input failed (%d)", status);
    }

    // Notify any waiting onFrameAvailable calls.
    --mNumOutstandingBuffers;
    mReleaseCondition.signal();

    if (status == NO_ERROR) {
        ALOGV("released buffer %#llx to input", (long long)oldBuffer->getId());
    }
}

void MediaSync::onAbandoned_l(bool isInput) {
    ALOGE("the %s has abandoned me", (isInput ? "input" : "output"));
    if (!mIsAbandoned) {
        if (isInput) {
            mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
        } else {
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
            mInput->abandon();
#else
            mInput->consumerDisconnect();
#endif
        }
        mIsAbandoned = true;
    }
    mReleaseCondition.broadcast();
}

void MediaSync::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainVideo:
        {
            Mutex::Autolock lock(mMutex);
            if (mNextBufferItemMediaUs != -1) {
                int64_t nowUs = ALooper::GetNowUs();
                int64_t itemRealUs = getRealTime(mNextBufferItemMediaUs, nowUs);

                // The message could arrive earlier than expected due to
                // various reasons, e.g., media clock has been changed because
                // of new anchor time or playback rate. In such cases, the
                // message needs to be re-posted.
                if (itemRealUs > nowUs) {
                    msg->post(itemRealUs - nowUs);
                    break;
                }
            }

            onDrainVideo_l();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

MediaSync::InputListener::InputListener(const sp<MediaSync> &sync)
      : mSync(sync) {}

MediaSync::InputListener::~InputListener() {}

void MediaSync::InputListener::onFrameAvailable(const BufferItem &/* item */) {
    mSync->onFrameAvailableFromInput();
}

#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
// We don't care about sideband streams, since we won't relay them.
void MediaSync::InputListener::onSidebandStreamChanged() {
    ALOGE("onSidebandStreamChanged: got sideband stream unexpectedly.");
}

void MediaSync::InputListener::binderDied(const wp<IBinder> &/* who */) {
    Mutex::Autolock lock(mSync->mMutex);
    mSync->onAbandoned_l(true /* isInput */);
}
#endif

MediaSync::OutputListener::OutputListener(const sp<MediaSync> &sync,
        const sp<IGraphicBufferProducer> &output)
      : mSync(sync),
        mOutput(output) {}

MediaSync::OutputListener::~OutputListener() {}

void MediaSync::OutputListener::onBufferReleased() {
    mSync->onBufferReleasedByOutput(mOutput);
}

void MediaSync::OutputListener::binderDied(const wp<IBinder> &/* who */) {
    Mutex::Autolock lock(mSync->mMutex);
    mSync->onAbandoned_l(false /* isInput */);
}

}  // namespace android