/*
 * Copyright 2012, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "PlaybackSession"
#include <utils/Log.h>

#include "PlaybackSession.h"

#include "Converter.h"
#include "MediaPuller.h"
#include "RepeaterSource.h"
#include "include/avc_utils.h"
#include "WifiDisplaySource.h"

#include <binder/IServiceManager.h>
#include <cutils/properties.h>
#include <media/IHDCP.h>
#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/AudioSource.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/NuMediaExtractor.h>
#include <media/stagefright/SurfaceMediaSource.h>
#include <media/stagefright/Utils.h>

#include <OMX_IVCommon.h>

namespace android {

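// A Track represents one elementary stream (audio or video) of the playback
// session. In the live-capture case it owns the MediaPuller that reads from
// the source and the Converter that encodes it; in the media-file case it
// merely carries the extractor-provided format. It also maps the session's
// track index to the corresponding MediaSender track index.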
struct WifiDisplaySource::PlaybackSession::Track : public AHandler {
    enum {
        kWhatStopped,
    };

    Track(const sp<AMessage> &notify,
          const sp<ALooper> &pullLooper,
          const sp<ALooper> &codecLooper,
          const sp<MediaPuller> &mediaPuller,
          const sp<Converter> &converter);

    Track(const sp<AMessage> &notify, const sp<AMessage> &format);

    void setRepeaterSource(const sp<RepeaterSource> &source);

    sp<AMessage> getFormat();
    bool isAudio() const;

    const sp<Converter> &converter() const;
    const sp<RepeaterSource> &repeaterSource() const;

    ssize_t mediaSenderTrackIndex() const;
    void setMediaSenderTrackIndex(size_t index);

    status_t start();
    void stopAsync();

    void pause();
    void resume();

    void queueAccessUnit(const sp<ABuffer> &accessUnit);
    sp<ABuffer> dequeueAccessUnit();

    bool hasOutputBuffer(int64_t *timeUs) const;
    void queueOutputBuffer(const sp<ABuffer> &accessUnit);
    sp<ABuffer> dequeueOutputBuffer();

#if SUSPEND_VIDEO_IF_IDLE
    bool isSuspended() const;
#endif

    size_t countQueuedOutputBuffers() const {
        return mQueuedOutputBuffers.size();
    }

    void requestIDRFrame();

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~Track();

private:
    enum {
        kWhatMediaPullerStopped,
    };

    sp<AMessage> mNotify;
    sp<ALooper> mPullLooper;
    sp<ALooper> mCodecLooper;
    sp<MediaPuller> mMediaPuller;
    sp<Converter> mConverter;
    sp<AMessage> mFormat;
    bool mStarted;
    ssize_t mMediaSenderTrackIndex;
    bool mIsAudio;
    List<sp<ABuffer> > mQueuedAccessUnits;
    sp<RepeaterSource> mRepeaterSource;
    List<sp<ABuffer> > mQueuedOutputBuffers;
    int64_t mLastOutputBufferQueuedTimeUs;

    static bool IsAudioFormat(const sp<AMessage> &format);

    DISALLOW_EVIL_CONSTRUCTORS(Track);
};

WifiDisplaySource::PlaybackSession::Track::Track(
        const sp<AMessage> &notify,
        const sp<ALooper> &pullLooper,
        const sp<ALooper> &codecLooper,
        const sp<MediaPuller> &mediaPuller,
        const sp<Converter> &converter)
    : mNotify(notify),
      mPullLooper(pullLooper),
      mCodecLooper(codecLooper),
      mMediaPuller(mediaPuller),
      mConverter(converter),
      mStarted(false),
      mIsAudio(IsAudioFormat(mConverter->getOutputFormat())),
      mLastOutputBufferQueuedTimeUs(-1ll) {
}

WifiDisplaySource::PlaybackSession::Track::Track(
        const sp<AMessage> &notify, const sp<AMessage> &format)
    : mNotify(notify),
      mFormat(format),
      mStarted(false),
      mIsAudio(IsAudioFormat(format)),
      mLastOutputBufferQueuedTimeUs(-1ll) {
}

WifiDisplaySource::PlaybackSession::Track::~Track() {
    CHECK(!mStarted);
}

// static
bool WifiDisplaySource::PlaybackSession::Track::IsAudioFormat(
        const sp<AMessage> &format) {
    AString mime;
    CHECK(format->findString("mime", &mime));

    return !strncasecmp(mime.c_str(), "audio/", 6);
}

sp<AMessage> WifiDisplaySource::PlaybackSession::Track::getFormat() {
    return mFormat != NULL ? mFormat : mConverter->getOutputFormat();
}

bool WifiDisplaySource::PlaybackSession::Track::isAudio() const {
    return mIsAudio;
}

const sp<Converter> &WifiDisplaySource::PlaybackSession::Track::converter() const {
    return mConverter;
}

const sp<RepeaterSource> &
WifiDisplaySource::PlaybackSession::Track::repeaterSource() const {
    return mRepeaterSource;
}

ssize_t WifiDisplaySource::PlaybackSession::Track::mediaSenderTrackIndex() const {
    CHECK_GE(mMediaSenderTrackIndex, 0);
    return mMediaSenderTrackIndex;
}

void WifiDisplaySource::PlaybackSession::Track::setMediaSenderTrackIndex(
        size_t index) {
    mMediaSenderTrackIndex = index;
}

status_t WifiDisplaySource::PlaybackSession::Track::start() {
    ALOGV("Track::start isAudio=%d", mIsAudio);

    CHECK(!mStarted);

    status_t err = OK;

    if (mMediaPuller != NULL) {
        err = mMediaPuller->start();
    }

    if (err == OK) {
        mStarted = true;
    }

    return err;
}

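// Asks the Converter to shut down and the MediaPuller to stop pulling; the
// resulting kWhatMediaPullerStopped message (posted by the puller, or
// directly below if there is nothing to stop) completes the teardown in
// onMessageReceived().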
void WifiDisplaySource::PlaybackSession::Track::stopAsync() {
    ALOGV("Track::stopAsync isAudio=%d", mIsAudio);

    if (mConverter != NULL) {
        mConverter->shutdownAsync();
    }

    sp<AMessage> msg = new AMessage(kWhatMediaPullerStopped, this);

    if (mStarted && mMediaPuller != NULL) {
        if (mRepeaterSource != NULL) {
            // Let's unblock MediaPuller's MediaSource::read().
            mRepeaterSource->wakeUp();
        }

        mMediaPuller->stopAsync(msg);
    } else {
        mStarted = false;
        msg->post();
    }
}

void WifiDisplaySource::PlaybackSession::Track::pause() {
    mMediaPuller->pause();
}

void WifiDisplaySource::PlaybackSession::Track::resume() {
    mMediaPuller->resume();
}

void WifiDisplaySource::PlaybackSession::Track::onMessageReceived(
        const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatMediaPullerStopped:
        {
            mConverter.clear();

            mStarted = false;

            sp<AMessage> notify = mNotify->dup();
            notify->setInt32("what", kWhatStopped);
            notify->post();

            ALOGI("kWhatStopped %s posted", mIsAudio ? "audio" : "video");
            break;
        }

        default:
            TRESPASS();
    }
}

void WifiDisplaySource::PlaybackSession::Track::queueAccessUnit(
        const sp<ABuffer> &accessUnit) {
    mQueuedAccessUnits.push_back(accessUnit);
}

sp<ABuffer> WifiDisplaySource::PlaybackSession::Track::dequeueAccessUnit() {
    if (mQueuedAccessUnits.empty()) {
        return NULL;
    }

    sp<ABuffer> accessUnit = *mQueuedAccessUnits.begin();
    CHECK(accessUnit != NULL);

    mQueuedAccessUnits.erase(mQueuedAccessUnits.begin());

    return accessUnit;
}

void WifiDisplaySource::PlaybackSession::Track::setRepeaterSource(
        const sp<RepeaterSource> &source) {
    mRepeaterSource = source;
}

void WifiDisplaySource::PlaybackSession::Track::requestIDRFrame() {
    if (mIsAudio) {
        return;
    }

    if (mRepeaterSource != NULL) {
        mRepeaterSource->wakeUp();
    }

    mConverter->requestIDRFrame();
}

bool WifiDisplaySource::PlaybackSession::Track::hasOutputBuffer(
        int64_t *timeUs) const {
    *timeUs = 0ll;

    if (mQueuedOutputBuffers.empty()) {
        return false;
    }

    const sp<ABuffer> &outputBuffer = *mQueuedOutputBuffers.begin();

    CHECK(outputBuffer->meta()->findInt64("timeUs", timeUs));

    return true;
}

void WifiDisplaySource::PlaybackSession::Track::queueOutputBuffer(
        const sp<ABuffer> &accessUnit) {
    mQueuedOutputBuffers.push_back(accessUnit);
    mLastOutputBufferQueuedTimeUs = ALooper::GetNowUs();
}

sp<ABuffer> WifiDisplaySource::PlaybackSession::Track::dequeueOutputBuffer() {
    CHECK(!mQueuedOutputBuffers.empty());

    sp<ABuffer> outputBuffer = *mQueuedOutputBuffers.begin();
    mQueuedOutputBuffers.erase(mQueuedOutputBuffers.begin());

    return outputBuffer;
}

#if SUSPEND_VIDEO_IF_IDLE
bool WifiDisplaySource::PlaybackSession::Track::isSuspended() const {
    if (!mQueuedOutputBuffers.empty()) {
        return false;
    }

    if (mLastOutputBufferQueuedTimeUs < 0ll) {
        // We've never seen an output buffer queued, but tracks start
        // out live, not suspended.
        return false;
    }

    // If we've not seen new output data for 60ms or more, we consider
    // this track suspended for the time being.
    return (ALooper::GetNowUs() - mLastOutputBufferQueuedTimeUs) > 60000ll;
}
#endif

////////////////////////////////////////////////////////////////////////////////

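// A PlaybackSession owns the media pipeline for one connected sink: it
// builds the audio/video tracks, hands their encoded output to a
// MediaSender and reports liveness, establishment and error events back to
// WifiDisplaySource through the "notify" message. If "path" is non-NULL,
// content is streamed from that media file instead of mirroring the display.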
WifiDisplaySource::PlaybackSession::PlaybackSession(
        const String16 &opPackageName,
        const sp<ANetworkSession> &netSession,
        const sp<AMessage> &notify,
        const in_addr &interfaceAddr,
        const sp<IHDCP> &hdcp,
        const char *path)
    : mOpPackageName(opPackageName),
      mNetSession(netSession),
      mNotify(notify),
      mInterfaceAddr(interfaceAddr),
      mHDCP(hdcp),
      mLocalRTPPort(-1),
      mWeAreDead(false),
      mPaused(false),
      mLastLifesignUs(),
      mVideoTrackIndex(-1),
      mPrevTimeUs(-1ll),
      mPullExtractorPending(false),
      mPullExtractorGeneration(0),
      mFirstSampleTimeRealUs(-1ll),
      mFirstSampleTimeUs(-1ll) {
    if (path != NULL) {
        mMediaPath.setTo(path);
    }
}

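// Creates the MediaSender, builds the packetizer (either live tracks with
// converters or the file extractor) and starts the asynchronous RTP/RTCP
// setup towards the client. On failure the partially constructed sender is
// unregistered and released again.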
status_t WifiDisplaySource::PlaybackSession::init(
        const char *clientIP,
        int32_t clientRtp,
        RTPSender::TransportMode rtpMode,
        int32_t clientRtcp,
        RTPSender::TransportMode rtcpMode,
        bool enableAudio,
        bool usePCMAudio,
        bool enableVideo,
        VideoFormats::ResolutionType videoResolutionType,
        size_t videoResolutionIndex,
        VideoFormats::ProfileType videoProfileType,
        VideoFormats::LevelType videoLevelType) {
    sp<AMessage> notify = new AMessage(kWhatMediaSenderNotify, this);
    mMediaSender = new MediaSender(mNetSession, notify);
    looper()->registerHandler(mMediaSender);

    mMediaSender->setHDCP(mHDCP);

    status_t err = setupPacketizer(
            enableAudio,
            usePCMAudio,
            enableVideo,
            videoResolutionType,
            videoResolutionIndex,
            videoProfileType,
            videoLevelType);

    if (err == OK) {
        err = mMediaSender->initAsync(
                -1 /* trackIndex */,
                clientIP,
                clientRtp,
                rtpMode,
                clientRtcp,
                rtcpMode,
                &mLocalRTPPort);
    }

    if (err != OK) {
        mLocalRTPPort = -1;

        looper()->unregisterHandler(mMediaSender->id());
        mMediaSender.clear();

        return err;
    }

    updateLiveness();

    return OK;
}

WifiDisplaySource::PlaybackSession::~PlaybackSession() {
}

int32_t WifiDisplaySource::PlaybackSession::getRTPPort() const {
    return mLocalRTPPort;
}

int64_t WifiDisplaySource::PlaybackSession::getLastLifesignUs() const {
    return mLastLifesignUs;
}

void WifiDisplaySource::PlaybackSession::updateLiveness() {
    mLastLifesignUs = ALooper::GetNowUs();
}

status_t WifiDisplaySource::PlaybackSession::play() {
    updateLiveness();

    (new AMessage(kWhatResume, this))->post();

    return OK;
}

status_t WifiDisplaySource::PlaybackSession::onMediaSenderInitialized() {
    for (size_t i = 0; i < mTracks.size(); ++i) {
        CHECK_EQ((status_t)OK, mTracks.editValueAt(i)->start());
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatSessionEstablished);
    notify->post();

    return OK;
}

status_t WifiDisplaySource::PlaybackSession::pause() {
    updateLiveness();

    (new AMessage(kWhatPause, this))->post();

    return OK;
}

void WifiDisplaySource::PlaybackSession::destroyAsync() {
    ALOGI("destroyAsync");

    for (size_t i = 0; i < mTracks.size(); ++i) {
        mTracks.valueAt(i)->stopAsync();
    }
}

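// Central message dispatch for the session: encoded access units and errors
// from the Converters, MediaSender status (init done, network stall, sink
// feedback), per-track stop notifications, pause/resume requests and timed
// extractor pulls all arrive here.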
void WifiDisplaySource::PlaybackSession::onMessageReceived(
        const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatConverterNotify:
        {
            if (mWeAreDead) {
                ALOGV("dropping msg '%s' because we're dead",
                      msg->debugString().c_str());

                break;
            }

            int32_t what;
            CHECK(msg->findInt32("what", &what));

            size_t trackIndex;
            CHECK(msg->findSize("trackIndex", &trackIndex));

            if (what == Converter::kWhatAccessUnit) {
                sp<ABuffer> accessUnit;
                CHECK(msg->findBuffer("accessUnit", &accessUnit));

                const sp<Track> &track = mTracks.valueFor(trackIndex);

                status_t err = mMediaSender->queueAccessUnit(
                        track->mediaSenderTrackIndex(),
                        accessUnit);

                if (err != OK) {
                    notifySessionDead();
                }
                break;
            } else if (what == Converter::kWhatEOS) {
                CHECK_EQ(what, Converter::kWhatEOS);

                ALOGI("output EOS on track %zu", trackIndex);

                ssize_t index = mTracks.indexOfKey(trackIndex);
                CHECK_GE(index, 0);

                const sp<Converter> &converter =
                    mTracks.valueAt(index)->converter();
                looper()->unregisterHandler(converter->id());

                mTracks.removeItemsAt(index);

                if (mTracks.isEmpty()) {
                    ALOGI("Reached EOS");
                }
            } else if (what != Converter::kWhatShutdownCompleted) {
                CHECK_EQ(what, Converter::kWhatError);

                status_t err;
                CHECK(msg->findInt32("err", &err));

                ALOGE("converter signaled error %d", err);

                notifySessionDead();
            }
            break;
        }

        case kWhatMediaSenderNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            if (what == MediaSender::kWhatInitDone) {
                status_t err;
                CHECK(msg->findInt32("err", &err));

                if (err == OK) {
                    onMediaSenderInitialized();
                } else {
                    notifySessionDead();
                }
            } else if (what == MediaSender::kWhatError) {
                notifySessionDead();
            } else if (what == MediaSender::kWhatNetworkStall) {
                size_t numBytesQueued;
                CHECK(msg->findSize("numBytesQueued", &numBytesQueued));

                if (mVideoTrackIndex >= 0) {
                    const sp<Track> &videoTrack =
                        mTracks.valueFor(mVideoTrackIndex);

                    sp<Converter> converter = videoTrack->converter();
                    if (converter != NULL) {
                        converter->dropAFrame();
                    }
                }
            } else if (what == MediaSender::kWhatInformSender) {
                onSinkFeedback(msg);
            } else {
                TRESPASS();
            }
            break;
        }

        case kWhatTrackNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            size_t trackIndex;
            CHECK(msg->findSize("trackIndex", &trackIndex));

            if (what == Track::kWhatStopped) {
                ALOGI("Track %zu stopped", trackIndex);

                sp<Track> track = mTracks.valueFor(trackIndex);
                looper()->unregisterHandler(track->id());
                mTracks.removeItem(trackIndex);
                track.clear();

                if (!mTracks.isEmpty()) {
                    ALOGI("not all tracks are stopped yet");
                    break;
                }

                looper()->unregisterHandler(mMediaSender->id());
                mMediaSender.clear();

                sp<AMessage> notify = mNotify->dup();
                notify->setInt32("what", kWhatSessionDestroyed);
                notify->post();
            }
            break;
        }

        case kWhatPause:
        {
            if (mExtractor != NULL) {
                ++mPullExtractorGeneration;
                mFirstSampleTimeRealUs = -1ll;
                mFirstSampleTimeUs = -1ll;
            }

            if (mPaused) {
                break;
            }

            for (size_t i = 0; i < mTracks.size(); ++i) {
                mTracks.editValueAt(i)->pause();
            }

            mPaused = true;
            break;
        }

        case kWhatResume:
        {
            if (mExtractor != NULL) {
                schedulePullExtractor();
            }

            if (!mPaused) {
                break;
            }

            for (size_t i = 0; i < mTracks.size(); ++i) {
                mTracks.editValueAt(i)->resume();
            }

            mPaused = false;
            break;
        }

        case kWhatPullExtractorSample:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));

            if (generation != mPullExtractorGeneration) {
                break;
            }

            mPullExtractorPending = false;

            onPullExtractor();
            break;
        }

        default:
            TRESPASS();
    }
}

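// Handles latency feedback from the sink. When the system properties
// "media.wfd.video-bitrate" / "media.wfd.video-framerate" are set to
// "adaptive", the video bitrate and repeater frame rate are nudged up or
// down based on the reported average latency, clamped to 500 kbps..10 Mbps
// and 5..30 Hz respectively.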
void WifiDisplaySource::PlaybackSession::onSinkFeedback(const sp<AMessage> &msg) {
    int64_t avgLatencyUs;
    CHECK(msg->findInt64("avgLatencyUs", &avgLatencyUs));

    int64_t maxLatencyUs;
    CHECK(msg->findInt64("maxLatencyUs", &maxLatencyUs));

    ALOGI("sink reports avg. latency of %lld ms (max %lld ms)",
          avgLatencyUs / 1000ll,
          maxLatencyUs / 1000ll);

    if (mVideoTrackIndex >= 0) {
        const sp<Track> &videoTrack = mTracks.valueFor(mVideoTrackIndex);
        sp<Converter> converter = videoTrack->converter();

        if (converter != NULL) {
            int32_t videoBitrate =
                Converter::GetInt32Property("media.wfd.video-bitrate", -1);

            char val[PROPERTY_VALUE_MAX];
            if (videoBitrate < 0
                    && property_get("media.wfd.video-bitrate", val, NULL)
                    && !strcasecmp("adaptive", val)) {
                videoBitrate = converter->getVideoBitrate();

                if (avgLatencyUs > 300000ll) {
                    videoBitrate *= 0.6;
                } else if (avgLatencyUs < 100000ll) {
                    videoBitrate *= 1.1;
                }
            }

            if (videoBitrate > 0) {
                if (videoBitrate < 500000) {
                    videoBitrate = 500000;
                } else if (videoBitrate > 10000000) {
                    videoBitrate = 10000000;
                }

                if (videoBitrate != converter->getVideoBitrate()) {
                    ALOGI("setting video bitrate to %d bps", videoBitrate);

                    converter->setVideoBitrate(videoBitrate);
                }
            }
        }

        sp<RepeaterSource> repeaterSource = videoTrack->repeaterSource();
        if (repeaterSource != NULL) {
            double rateHz =
                Converter::GetInt32Property(
                        "media.wfd.video-framerate", -1);

            char val[PROPERTY_VALUE_MAX];
            if (rateHz < 0.0
                    && property_get("media.wfd.video-framerate", val, NULL)
                    && !strcasecmp("adaptive", val)) {
                rateHz = repeaterSource->getFrameRate();

                if (avgLatencyUs > 300000ll) {
                    rateHz *= 0.9;
                } else if (avgLatencyUs < 200000ll) {
                    rateHz *= 1.1;
                }
            }

            if (rateHz > 0) {
                if (rateHz < 5.0) {
                    rateHz = 5.0;
                } else if (rateHz > 30.0) {
                    rateHz = 30.0;
                }

                if (rateHz != repeaterSource->getFrameRate()) {
                    ALOGI("setting frame rate to %.2f Hz", rateHz);

                    repeaterSource->setFrameRate(rateHz);
                }
            }
        }
    }
}

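// File-playback path: opens mMediaPath with a NuMediaExtractor, selects at
// most one audio and one video track and registers their formats with the
// MediaSender. No MediaPuller/Converter is created; samples are pulled
// straight from the extractor in onPullExtractor().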
status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer(
        bool enableAudio, bool enableVideo) {
    DataSource::RegisterDefaultSniffers();

    mExtractor = new NuMediaExtractor;

    status_t err = mExtractor->setDataSource(
            NULL /* httpService */, mMediaPath.c_str());

    if (err != OK) {
        return err;
    }

    size_t n = mExtractor->countTracks();
    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < n; ++i) {
        sp<AMessage> format;
        err = mExtractor->getTrackFormat(i, &format);

        if (err != OK) {
            continue;
        }

        AString mime;
        CHECK(format->findString("mime", &mime));

        bool isAudio = !strncasecmp(mime.c_str(), "audio/", 6);
        bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);

        if (isAudio && enableAudio && !haveAudio) {
            haveAudio = true;
        } else if (isVideo && enableVideo && !haveVideo) {
            haveVideo = true;
        } else {
            continue;
        }

        err = mExtractor->selectTrack(i);

        size_t trackIndex = mTracks.size();

        sp<AMessage> notify = new AMessage(kWhatTrackNotify, this);
        notify->setSize("trackIndex", trackIndex);

        sp<Track> track = new Track(notify, format);
        looper()->registerHandler(track);

        mTracks.add(trackIndex, track);

        mExtractorTrackToInternalTrack.add(i, trackIndex);

        if (isVideo) {
            mVideoTrackIndex = trackIndex;
        }

        uint32_t flags = MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS;

        ssize_t mediaSenderTrackIndex =
            mMediaSender->addTrack(format, flags);
        CHECK_GE(mediaSenderTrackIndex, 0);

        track->setMediaSenderTrackIndex(mediaSenderTrackIndex);

        if ((haveAudio || !enableAudio) && (haveVideo || !enableVideo)) {
            break;
        }
    }

    return OK;
}

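// Paces file playback in real time: the delay until the next
// kWhatPullExtractorSample message is derived from the next sample's media
// timestamp relative to the wallclock time at which the first sample was
// dispatched, falling back to a one second delay if no sample time is
// available.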
void WifiDisplaySource::PlaybackSession::schedulePullExtractor() {
    if (mPullExtractorPending) {
        return;
    }

    int64_t delayUs = 1000000; // default delay is 1 sec
    int64_t sampleTimeUs;
    status_t err = mExtractor->getSampleTime(&sampleTimeUs);

    if (err == OK) {
        int64_t nowUs = ALooper::GetNowUs();

        if (mFirstSampleTimeRealUs < 0ll) {
            mFirstSampleTimeRealUs = nowUs;
            mFirstSampleTimeUs = sampleTimeUs;
        }

        int64_t whenUs = sampleTimeUs - mFirstSampleTimeUs + mFirstSampleTimeRealUs;
        delayUs = whenUs - nowUs;
    } else {
        ALOGW("could not get sample time (%d)", err);
    }

    sp<AMessage> msg = new AMessage(kWhatPullExtractorSample, this);
    msg->setInt32("generation", mPullExtractorGeneration);
    msg->post(delayUs);

    mPullExtractorPending = true;
}

void WifiDisplaySource::PlaybackSession::onPullExtractor() {
    sp<ABuffer> accessUnit = new ABuffer(1024 * 1024);
    status_t err = mExtractor->readSampleData(accessUnit);
    if (err != OK) {
        // EOS.
        return;
    }

    int64_t timeUs;
    CHECK_EQ((status_t)OK, mExtractor->getSampleTime(&timeUs));

    accessUnit->meta()->setInt64(
            "timeUs", mFirstSampleTimeRealUs + timeUs - mFirstSampleTimeUs);

    size_t trackIndex;
    CHECK_EQ((status_t)OK, mExtractor->getSampleTrackIndex(&trackIndex));

    sp<AMessage> msg = new AMessage(kWhatConverterNotify, this);

    msg->setSize(
            "trackIndex", mExtractorTrackToInternalTrack.valueFor(trackIndex));

    msg->setInt32("what", Converter::kWhatAccessUnit);
    msg->setBuffer("accessUnit", accessUnit);
    msg->post();

    mExtractor->advance();

    schedulePullExtractor();
}

status_t WifiDisplaySource::PlaybackSession::setupPacketizer(
        bool enableAudio,
        bool usePCMAudio,
        bool enableVideo,
        VideoFormats::ResolutionType videoResolutionType,
        size_t videoResolutionIndex,
        VideoFormats::ProfileType videoProfileType,
        VideoFormats::LevelType videoLevelType) {
    CHECK(enableAudio || enableVideo);

    if (!mMediaPath.empty()) {
        return setupMediaPacketizer(enableAudio, enableVideo);
    }

    if (enableVideo) {
        status_t err = addVideoSource(
                videoResolutionType, videoResolutionIndex, videoProfileType,
                videoLevelType);

        if (err != OK) {
            return err;
        }
    }

    if (!enableAudio) {
        return OK;
    }

    return addAudioSource(usePCMAudio);
}

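// Live-capture path for a single track: a MediaPuller drains "source" on its
// own looper and feeds a Converter (the encoder) on a second looper; the
// converter's output format is then registered with the MediaSender. Video
// is encoded as AVC with the negotiated profile/level, audio as 16-bit PCM
// or AAC depending on usePCMAudio.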
status_t WifiDisplaySource::PlaybackSession::addSource(
        bool isVideo, const sp<MediaSource> &source, bool isRepeaterSource,
        bool usePCMAudio, unsigned profileIdc, unsigned levelIdc,
        unsigned constraintSet, size_t *numInputBuffers) {
    CHECK(!usePCMAudio || !isVideo);
    CHECK(!isRepeaterSource || isVideo);
    CHECK(!profileIdc || isVideo);
    CHECK(!levelIdc || isVideo);
    CHECK(!constraintSet || isVideo);

    sp<ALooper> pullLooper = new ALooper;
    pullLooper->setName("pull_looper");

    pullLooper->start(
            false /* runOnCallingThread */,
            false /* canCallJava */,
            PRIORITY_AUDIO);

    sp<ALooper> codecLooper = new ALooper;
    codecLooper->setName("codec_looper");

    codecLooper->start(
            false /* runOnCallingThread */,
            false /* canCallJava */,
            PRIORITY_AUDIO);

    size_t trackIndex;

    sp<AMessage> notify;

    trackIndex = mTracks.size();

    sp<AMessage> format;
    status_t err = convertMetaDataToMessage(source->getFormat(), &format);
    CHECK_EQ(err, (status_t)OK);

    if (isVideo) {
        format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
        format->setInt32(
                "android._input-metadata-buffer-type", kMetadataBufferTypeANWBuffer);
        format->setInt32("android._store-metadata-in-buffers-output", (mHDCP != NULL)
                && (mHDCP->getCaps() & HDCPModule::HDCP_CAPS_ENCRYPT_NATIVE));
        format->setInt32(
                "color-format", OMX_COLOR_FormatAndroidOpaque);
        format->setInt32("profile-idc", profileIdc);
        format->setInt32("level-idc", levelIdc);
        format->setInt32("constraint-set", constraintSet);
    } else {
        if (usePCMAudio) {
            format->setInt32("pcm-encoding", kAudioEncodingPcm16bit);
            format->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
        } else {
            format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
        }
    }

    notify = new AMessage(kWhatConverterNotify, this);
    notify->setSize("trackIndex", trackIndex);

    sp<Converter> converter = new Converter(notify, codecLooper, format);

    looper()->registerHandler(converter);

    err = converter->init();
    if (err != OK) {
        ALOGE("%s converter returned err %d", isVideo ? "video" : "audio", err);

        looper()->unregisterHandler(converter->id());
        return err;
    }

    notify = new AMessage(Converter::kWhatMediaPullerNotify, converter);
    notify->setSize("trackIndex", trackIndex);

    sp<MediaPuller> puller = new MediaPuller(source, notify);
    pullLooper->registerHandler(puller);

    if (numInputBuffers != NULL) {
        *numInputBuffers = converter->getInputBufferCount();
    }

    notify = new AMessage(kWhatTrackNotify, this);
    notify->setSize("trackIndex", trackIndex);

    sp<Track> track = new Track(
            notify, pullLooper, codecLooper, puller, converter);

    if (isRepeaterSource) {
        track->setRepeaterSource(static_cast<RepeaterSource *>(source.get()));
    }

    looper()->registerHandler(track);

    mTracks.add(trackIndex, track);

    if (isVideo) {
        mVideoTrackIndex = trackIndex;
    }

    uint32_t flags = 0;
    if (converter->needToManuallyPrependSPSPPS()) {
        flags |= MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS;
    }

    ssize_t mediaSenderTrackIndex =
        mMediaSender->addTrack(converter->getOutputFormat(), flags);
    CHECK_GE(mediaSenderTrackIndex, 0);

    track->setMediaSenderTrackIndex(mediaSenderTrackIndex);

    return OK;
}

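// Mirrored video comes from a SurfaceMediaSource wrapped in a RepeaterSource
// pinned to the negotiated frame rate; the SurfaceMediaSource's buffer
// producer is exposed via getSurfaceTexture(), presumably so the caller can
// route the display output into it.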
status_t WifiDisplaySource::PlaybackSession::addVideoSource(
        VideoFormats::ResolutionType videoResolutionType,
        size_t videoResolutionIndex,
        VideoFormats::ProfileType videoProfileType,
        VideoFormats::LevelType videoLevelType) {
    size_t width, height, framesPerSecond;
    bool interlaced;
    CHECK(VideoFormats::GetConfiguration(
            videoResolutionType,
            videoResolutionIndex,
            &width,
            &height,
            &framesPerSecond,
            &interlaced));

    unsigned profileIdc, levelIdc, constraintSet;
    CHECK(VideoFormats::GetProfileLevel(
            videoProfileType,
            videoLevelType,
            &profileIdc,
            &levelIdc,
            &constraintSet));

    sp<SurfaceMediaSource> source = new SurfaceMediaSource(width, height);

    source->setUseAbsoluteTimestamps();

    sp<RepeaterSource> videoSource =
        new RepeaterSource(source, framesPerSecond);

    size_t numInputBuffers;
    status_t err = addSource(
            true /* isVideo */, videoSource, true /* isRepeaterSource */,
            false /* usePCMAudio */, profileIdc, levelIdc, constraintSet,
            &numInputBuffers);

    if (err != OK) {
        return err;
    }

    err = source->setMaxAcquiredBufferCount(numInputBuffers);
    CHECK_EQ(err, (status_t)OK);

    mProducer = source->getProducer();

    return OK;
}

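// Audio is captured from the REMOTE_SUBMIX source at 48 kHz stereo. If the
// capture source fails to initialize, the session simply continues without
// audio rather than reporting an error.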
status_t WifiDisplaySource::PlaybackSession::addAudioSource(bool usePCMAudio) {
    sp<AudioSource> audioSource = new AudioSource(
            AUDIO_SOURCE_REMOTE_SUBMIX,
            mOpPackageName,
            48000 /* sampleRate */,
            2 /* channelCount */);

    if (audioSource->initCheck() == OK) {
        return addSource(
                false /* isVideo */, audioSource, false /* isRepeaterSource */,
                usePCMAudio, 0 /* profileIdc */, 0 /* levelIdc */,
                0 /* constraintSet */, NULL /* numInputBuffers */);
    }

    ALOGW("Unable to instantiate audio source");

    return OK;
}

sp<IGraphicBufferProducer> WifiDisplaySource::PlaybackSession::getSurfaceTexture() {
    return mProducer;
}

void WifiDisplaySource::PlaybackSession::requestIDRFrame() {
    for (size_t i = 0; i < mTracks.size(); ++i) {
        const sp<Track> &track = mTracks.valueAt(i);

        track->requestIDRFrame();
    }
}

void WifiDisplaySource::PlaybackSession::notifySessionDead() {
    // Inform WifiDisplaySource of our premature death (wish).
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatSessionDead);
    notify->post();

    mWeAreDead = true;
}

}  // namespace android