/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "RTPSource"
#include <utils/Log.h>

#include "RTPSource.h"

#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <string.h>

namespace android {

const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs
static int32_t kMaxAllowedStaleAccessUnits = 20;

NuPlayer::RTPSource::RTPSource(
        const sp<AMessage> &notify,
        const String8& rtpParams)
    : Source(notify),
      mRTPParams(rtpParams),
      mFlags(0),
      mState(DISCONNECTED),
      mFinalResult(OK),
      mBuffering(false),
      mInPreparationPhase(true),
      mRTPConn(new ARTPConnection(ARTPConnection::kViLTEConnection)),
      mEOSTimeoutAudio(0),
      mEOSTimeoutVideo(0),
      mFirstAccessUnit(true),
      mAllTracksHaveTime(false),
      mNTPAnchorUs(-1),
      mMediaAnchorUs(-1),
      mLastMediaTimeUs(-1),
      mNumAccessUnitsReceived(0),
      mLastCVOUpdated(-1),
      mReceivedFirstRTCPPacket(false),
      mReceivedFirstRTPPacket(false),
      mPausing(false),
      mPauseGeneration(0) {
    ALOGD("RTPSource initialized with rtpParams=%s", rtpParams.string());
}

NuPlayer::RTPSource::~RTPSource() {
    if (mLooper != NULL) {
        mLooper->unregisterHandler(id());
        mLooper->unregisterHandler(mRTPConn->id());
        mLooper->stop();
    }
}

status_t NuPlayer::RTPSource::getBufferingSettings(
            BufferingSettings* buffering /* nonnull */) {
    Mutex::Autolock _l(mBufferingSettingsLock);
    *buffering = mBufferingSettings;
    return OK;
}

status_t NuPlayer::RTPSource::setBufferingSettings(const BufferingSettings& buffering) {
    Mutex::Autolock _l(mBufferingSettingsLock);
    mBufferingSettings = buffering;
    return OK;
}

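// Parses mRTPParams into per-track TrackInfo entries, generates an SDP description for each
// track, creates the RTP/RTCP socket pair, and registers each stream with ARTPConnection
// before signaling that preparation is complete.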
void NuPlayer::RTPSource::prepareAsync() {
    if (mLooper == NULL) {
        mLooper = new ALooper;
        mLooper->setName("rtp");
        mLooper->start();

        mLooper->registerHandler(this);
        mLooper->registerHandler(mRTPConn);
    }

    CHECK_EQ(mState, (int)DISCONNECTED);
    mState = CONNECTING;

    setParameters(mRTPParams);

    TrackInfo *info = NULL;
    unsigned i;
    for (i = 0; i < mTracks.size(); i++) {
        info = &mTracks.editItemAt(i);

        if (info == NULL)
            break;

        AString sdp;
        ASessionDescription::SDPStringFactory(sdp, info->mLocalIp,
                info->mIsAudio, info->mLocalPort, info->mPayloadType, info->mAS, info->mCodecName,
                NULL, info->mWidth, info->mHeight, info->mCVOExtMap);
        ALOGD("RTPSource SDP =>\n%s", sdp.c_str());

        sp<ASessionDescription> desc = new ASessionDescription;
        bool isValidSdp = desc->setTo(sdp.c_str(), sdp.size());
        ALOGV("RTPSource isValidSdp => %d", isValidSdp);

        int sockRtp, sockRtcp;
        ARTPConnection::MakeRTPSocketPair(&sockRtp, &sockRtcp, info->mLocalIp, info->mRemoteIp,
                info->mLocalPort, info->mRemotePort, info->mSocketNetwork);

        sp<AMessage> notify = new AMessage('accu', this);

        ALOGV("RTPSource addStream. track-index=%d", i);
        notify->setSize("trackIndex", i);
        // Stream indices start from 1; index 0 is reserved for [root].
        mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
        mRTPConn->setSelfID(info->mSelfID);
        mRTPConn->setStaticJitterTimeMs(info->mJbTimeMs);

        unsigned long PT;
        AString formatDesc, formatParams;
        // Stream indices start from 1; index 0 is reserved for [root].
        desc->getFormatType(i + 1, &PT, &formatDesc, &formatParams);

        int32_t clockRate, numChannels;
        ASessionDescription::ParseFormatDesc(formatDesc.c_str(), &clockRate, &numChannels);
        info->mTimeScale = clockRate;

        info->mRTPSocket = sockRtp;
        info->mRTCPSocket = sockRtcp;
        info->mFirstSeqNumInSegment = 0;
        info->mNewSegment = true;
        info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
        info->mRTPAnchor = 0;
        info->mNTPAnchorUs = -1;
        info->mNormalPlayTimeRTP = 0;
        info->mNormalPlayTimeUs = 0ll;

        // Stream indices start from 1; index 0 is reserved for [root].
        info->mPacketSource = new APacketSource(desc, i + 1);

        int32_t timeScale;
        sp<MetaData> format = getTrackFormat(i, &timeScale);
        sp<AnotherPacketSource> source = new AnotherPacketSource(format);

        if (info->mIsAudio) {
            mAudioTrack = source;
        } else {
            mVideoTrack = source;
        }

        info->mSource = source;
        info->mRTPTime = 0;
        info->mNormalPlaytimeUs = 0;
        info->mNPTMappingValid = false;
    }

    if (mInPreparationPhase) {
        mInPreparationPhase = false;
        notifyPrepared();
    }
}

void NuPlayer::RTPSource::start() {
}

void NuPlayer::RTPSource::pause() {
    mState = PAUSED;
}

void NuPlayer::RTPSource::resume() {
    mState = CONNECTING;
}

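// Posts kWhatDisconnect to the looper and blocks until the disconnect has been handled.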
void NuPlayer::RTPSource::stop() {
    if (mLooper == NULL) {
        return;
    }
    sp<AMessage> msg = new AMessage(kWhatDisconnect, this);

    sp<AMessage> dummy;
    msg->postAndAwaitResponse(&dummy);
}

status_t NuPlayer::RTPSource::feedMoreTSData() {
    Mutex::Autolock _l(mBufferingLock);
    return mFinalResult;
}

sp<MetaData> NuPlayer::RTPSource::getFormatMeta(bool audio) {
    sp<AnotherPacketSource> source = getSource(audio);

    if (source == NULL) {
        return NULL;
    }

    return source->getFormat();
}

bool NuPlayer::RTPSource::haveSufficientDataOnAllTracks() {
    // We're going to buffer at least 2 secs worth of data on all tracks before
    // starting playback (both at startup and after a seek).

    static const int64_t kMinDurationUs = 2000000ll;

    int64_t mediaDurationUs = 0;
    getDuration(&mediaDurationUs);
    if ((mAudioTrack != NULL && mAudioTrack->isFinished(mediaDurationUs))
            || (mVideoTrack != NULL && mVideoTrack->isFinished(mediaDurationUs))) {
        return true;
    }

    status_t err;
    int64_t durationUs;
    if (mAudioTrack != NULL
            && (durationUs = mAudioTrack->getBufferedDurationUs(&err))
                    < kMinDurationUs
            && err == OK) {
        ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
              durationUs / 1E6);
        return false;
    }

    if (mVideoTrack != NULL
            && (durationUs = mVideoTrack->getBufferedDurationUs(&err))
                    < kMinDurationUs
            && err == OK) {
        ALOGV("video track doesn't have enough data yet. (%.2f secs buffered)",
              durationUs / 1E6);
        return false;
    }

    return true;
}

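// Returns -EWOULDBLOCK while paused or while no buffer is available, propagates EOS once the
// other track has finished or the near-EOS timeout expires, and forwards CVO (rotation)
// changes to the upper layer before returning the dequeued access unit.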
status_t NuPlayer::RTPSource::dequeueAccessUnit(
        bool audio, sp<ABuffer> *accessUnit) {

    sp<AnotherPacketSource> source = getSource(audio);

    if (mState == PAUSED) {
        ALOGV("-EWOULDBLOCK");
        return -EWOULDBLOCK;
    }

    status_t finalResult;
    if (!source->hasBufferAvailable(&finalResult)) {
        if (finalResult == OK) {
            int64_t mediaDurationUs = 0;
            getDuration(&mediaDurationUs);
            sp<AnotherPacketSource> otherSource = getSource(!audio);
            status_t otherFinalResult;

            // If the other source has already signaled EOS, this source should signal EOS as well.
            if (otherSource != NULL &&
                    !otherSource->hasBufferAvailable(&otherFinalResult) &&
                    otherFinalResult == ERROR_END_OF_STREAM) {
                source->signalEOS(ERROR_END_OF_STREAM);
                return ERROR_END_OF_STREAM;
            }

            // If this source has detected that it is near the end, give it some time to
            // retrieve more data before signaling EOS.
            if (source->isFinished(mediaDurationUs)) {
                int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
                if (eosTimeout == 0) {
                    setEOSTimeout(audio, ALooper::GetNowUs());
                } else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) {
                    setEOSTimeout(audio, 0);
                    source->signalEOS(ERROR_END_OF_STREAM);
                    return ERROR_END_OF_STREAM;
                }
                return -EWOULDBLOCK;
            }

            if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) {
                // We should not enter buffering mode
                // if either source has already detected EOS.
                // TODO: check whether the line below is needed.
                // startBufferingIfNecessary();
            }

            return -EWOULDBLOCK;
        }
        return finalResult;
    }

    setEOSTimeout(audio, 0);

    finalResult = source->dequeueAccessUnit(accessUnit);
    if (finalResult != OK) {
        return finalResult;
    }

    int32_t cvo;
    if ((*accessUnit) != NULL && (*accessUnit)->meta()->findInt32("cvo", &cvo) &&
            cvo != mLastCVOUpdated) {
        sp<AMessage> msg = new AMessage();
        msg->setInt32("payload-type", ARTPSource::RTP_CVO);
        msg->setInt32("cvo", cvo);

        sp<AMessage> notify = dupNotify();
        notify->setInt32("what", kWhatIMSRxNotice);
        notify->setMessage("message", msg);
        notify->post();

        ALOGV("notify cvo updated (%d)->(%d) to upper layer", mLastCVOUpdated, cvo);
        mLastCVOUpdated = cvo;
    }

    return finalResult;
}

sp<AnotherPacketSource> NuPlayer::RTPSource::getSource(bool audio) {
    return audio ? mAudioTrack : mVideoTrack;
}

void NuPlayer::RTPSource::setEOSTimeout(bool audio, int64_t timeout) {
    if (audio) {
        mEOSTimeoutAudio = timeout;
    } else {
        mEOSTimeoutVideo = timeout;
    }
}

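// Reports the longer of the audio and video track durations (0 if neither track reports one).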
status_t NuPlayer::RTPSource::getDuration(int64_t *durationUs) {
    *durationUs = 0ll;

    int64_t audioDurationUs;
    if (mAudioTrack != NULL
            && mAudioTrack->getFormat()->findInt64(
                kKeyDuration, &audioDurationUs)
            && audioDurationUs > *durationUs) {
        *durationUs = audioDurationUs;
    }

    int64_t videoDurationUs;
    if (mVideoTrack != NULL
            && mVideoTrack->getFormat()->findInt64(
                kKeyDuration, &videoDurationUs)
            && videoDurationUs > *durationUs) {
        *durationUs = videoDurationUs;
    }

    return OK;
}

status_t NuPlayer::RTPSource::seekTo(int64_t seekTimeUs, MediaPlayerSeekMode mode) {
    ALOGV("RTPSource::seekTo=%d, mode=%d", (int)seekTimeUs, mode);
    return OK;
}

void NuPlayer::RTPSource::schedulePollBuffering() {
    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
    msg->post(kBufferingPollIntervalUs); // 1 second intervals
}

void NuPlayer::RTPSource::onPollBuffering() {
    schedulePollBuffering();
}

bool NuPlayer::RTPSource::isRealTime() const {
    ALOGD("RTPSource::isRealTime=%d", true);
    return true;
}

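// Handles messages posted by ARTPConnection ('accu' access-unit and RTCP notifications) as
// well as the internal disconnect and buffering-poll messages.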
void NuPlayer::RTPSource::onMessageReceived(const sp<AMessage> &msg) {
    ALOGV("onMessageReceived =%d", msg->what());

    switch (msg->what()) {
        case kWhatAccessUnitComplete:
        {
            if (mState == CONNECTING) {
                mState = CONNECTED;
            }

            int32_t timeUpdate;
            // "time-update" is raised from ARTPConnection::parseSR().
            if (msg->findInt32("time-update", &timeUpdate) && timeUpdate) {
                size_t trackIndex;
                CHECK(msg->findSize("trackIndex", &trackIndex));

                uint32_t rtpTime;
                uint64_t ntpTime;
                CHECK(msg->findInt32("rtp-time", (int32_t *)&rtpTime));
                CHECK(msg->findInt64("ntp-time", (int64_t *)&ntpTime));

                onTimeUpdate(trackIndex, rtpTime, ntpTime);
            }

            int32_t IMSRxNotice;
            if (msg->findInt32("rtcp-event", &IMSRxNotice)) {
                int32_t payloadType = 0, feedbackType = 0;
                CHECK(msg->findInt32("payload-type", &payloadType));
                msg->findInt32("feedback-type", &feedbackType);

                sp<AMessage> notify = dupNotify();
                notify->setInt32("what", kWhatIMSRxNotice);
                notify->setMessage("message", msg);
                notify->post();

                ALOGV("IMSRxNotice \t\t payload : %d feedback : %d",
                      payloadType, feedbackType);
                break;
            }

            size_t trackIndex;
            CHECK(msg->findSize("trackIndex", &trackIndex));

            sp<ABuffer> accessUnit;
            if (msg->findBuffer("access-unit", &accessUnit) == false) {
                break;
            }

            int32_t damaged;
            if (accessUnit->meta()->findInt32("damaged", &damaged)
                    && damaged) {
                ALOGD("dropping damaged access unit.");
                break;
            }

            // Implicitly assert on valid trackIndex here, which we ensure by
            // never removing tracks.
            TrackInfo *info = &mTracks.editItemAt(trackIndex);

            sp<AnotherPacketSource> source = info->mSource;
            if (source != NULL) {
                uint32_t rtpTime;
                CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));

                /* AnotherPacketSource asserts if no NTP time is provided;
                   RTPSource should always provide ntpUs.
                if (!info->mNPTMappingValid) {
                    // This is a live stream, we didn't receive any normal
                    // playtime mapping. We won't map to npt time.
                    source->queueAccessUnit(accessUnit);
                    break;
                }

                int64_t nptUs =
                    ((double)rtpTime - (double)info->mRTPTime)
                        / info->mTimeScale
                        * 1000000ll
                        + info->mNormalPlaytimeUs;

                */
                accessUnit->meta()->setInt64("timeUs", ALooper::GetNowUs());

                source->queueAccessUnit(accessUnit);
            }

            break;
        }
        case kWhatDisconnect:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            for (size_t i = 0; i < mTracks.size(); ++i) {
                TrackInfo *info = &mTracks.editItemAt(i);

                if (info->mIsAudio) {
                    mAudioTrack->signalEOS(ERROR_END_OF_STREAM);
                    mAudioTrack = NULL;
                    ALOGV("mAudioTrack disconnected");
                } else {
                    mVideoTrack->signalEOS(ERROR_END_OF_STREAM);
                    mVideoTrack = NULL;
                    ALOGV("mVideoTrack disconnected");
                }

                mRTPConn->removeStream(info->mRTPSocket, info->mRTCPSocket);
                close(info->mRTPSocket);
                close(info->mRTCPSocket);
            }

            mTracks.clear();
            mFirstAccessUnit = true;
            mAllTracksHaveTime = false;
            mNTPAnchorUs = -1;
            mMediaAnchorUs = -1;
            mLastMediaTimeUs = -1;
            mNumAccessUnitsReceived = 0;
            mReceivedFirstRTCPPacket = false;
            mReceivedFirstRTPPacket = false;
            mPausing = false;
            mPauseGeneration = 0;

            (new AMessage)->postReply(replyID);

            break;
        }
        case kWhatPollBuffering:
            break;
        default:
            TRESPASS();
    }
}

void NuPlayer::RTPSource::setTargetBitrate(int32_t bitrate) {
    mRTPConn->setTargetBitrate(bitrate);
}

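// Anchors the track's RTP time to the NTP time carried in an RTCP sender report. Once every
// track has an anchor and data has arrived on all channels, queued packets are timestamped
// and posted.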
void NuPlayer::RTPSource::onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) {
    ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = %#016llx",
         trackIndex, rtpTime, (long long)ntpTime);

    // Convert ntpTime in Q32 seconds to microseconds. Note: this will not lose precision
    // because ntpTimeUs is at most 52 bits (double holds 53 bits).
    int64_t ntpTimeUs = (int64_t)(ntpTime * 1E6 / (1ll << 32));

    TrackInfo *track = &mTracks.editItemAt(trackIndex);

    track->mRTPAnchor = rtpTime;
    track->mNTPAnchorUs = ntpTimeUs;

    if (mNTPAnchorUs < 0) {
        mNTPAnchorUs = ntpTimeUs;
        mMediaAnchorUs = mLastMediaTimeUs;
    }

    if (!mAllTracksHaveTime) {
        bool allTracksHaveTime = (mTracks.size() > 0);
        for (size_t i = 0; i < mTracks.size(); ++i) {
            TrackInfo *track = &mTracks.editItemAt(i);
            if (track->mNTPAnchorUs < 0) {
                allTracksHaveTime = false;
                break;
            }
        }
        if (allTracksHaveTime) {
            mAllTracksHaveTime = true;
            ALOGI("Time now established for all tracks.");
        }
    }
    if (mAllTracksHaveTime && dataReceivedOnAllChannels()) {
        // Time is now established, let's start timestamping immediately.
        for (size_t i = 0; i < mTracks.size(); ++i) {
            TrackInfo *trackInfo = &mTracks.editItemAt(i);
            while (!trackInfo->mPackets.empty()) {
                sp<ABuffer> accessUnit = *trackInfo->mPackets.begin();
                trackInfo->mPackets.erase(trackInfo->mPackets.begin());

                if (addMediaTimestamp(i, trackInfo, accessUnit)) {
                    postQueueAccessUnit(i, accessUnit);
                }
            }
        }
    }
}

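// Derives the media timestamp of an access unit from the track's RTP/NTP anchors; access
// units that map to a negative media time are dropped.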
bool NuPlayer::RTPSource::addMediaTimestamp(
        int32_t trackIndex, const TrackInfo *track,
        const sp<ABuffer> &accessUnit) {

    uint32_t rtpTime;
    CHECK(accessUnit->meta()->findInt32(
                "rtp-time", (int32_t *)&rtpTime));

    int64_t relRtpTimeUs =
        (((int64_t)rtpTime - (int64_t)track->mRTPAnchor) * 1000000ll)
        / track->mTimeScale;

    int64_t ntpTimeUs = track->mNTPAnchorUs + relRtpTimeUs;

    int64_t mediaTimeUs = mMediaAnchorUs + ntpTimeUs - mNTPAnchorUs;

    if (mediaTimeUs > mLastMediaTimeUs) {
        mLastMediaTimeUs = mediaTimeUs;
    }

    if (mediaTimeUs < 0) {
        ALOGV("dropping early accessUnit.");
        return false;
    }

    ALOGV("track %d rtpTime=%u mediaTimeUs = %lld us (%.2f secs)",
            trackIndex, rtpTime, (long long)mediaTimeUs, mediaTimeUs / 1E6);

    accessUnit->meta()->setInt64("timeUs", mediaTimeUs);

    return true;
}

bool NuPlayer::RTPSource::dataReceivedOnAllChannels() {
    TrackInfo *track;
    for (size_t i = 0; i < mTracks.size(); ++i) {
        track = &mTracks.editItemAt(i);
        if (track->mPackets.empty()) {
            return false;
        }
    }
    return true;
}

void NuPlayer::RTPSource::postQueueAccessUnit(
        size_t trackIndex, const sp<ABuffer> &accessUnit) {
    sp<AMessage> msg = new AMessage(kWhatAccessUnit, this);
    msg->setInt32("what", kWhatAccessUnit);
    msg->setSize("trackIndex", trackIndex);
    msg->setBuffer("accessUnit", accessUnit);
    msg->post();
}

void NuPlayer::RTPSource::postQueueEOS(size_t trackIndex, status_t finalResult) {
    sp<AMessage> msg = new AMessage(kWhatEOS, this);
    msg->setInt32("what", kWhatEOS);
    msg->setSize("trackIndex", trackIndex);
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

sp<MetaData> NuPlayer::RTPSource::getTrackFormat(size_t index, int32_t *timeScale) {
    CHECK_GE(index, 0u);
    CHECK_LT(index, mTracks.size());

    const TrackInfo &info = mTracks.itemAt(index);

    *timeScale = info.mTimeScale;

    return info.mPacketSource->getFormat();
}

void NuPlayer::RTPSource::onConnected() {
    ALOGV("onConnected");
    mState = CONNECTED;
}

void NuPlayer::RTPSource::onDisconnected(const sp<AMessage> &msg) {
    if (mState == DISCONNECTED) {
        return;
    }

    status_t err;
    CHECK(msg->findInt32("result", &err));
    CHECK_NE(err, (status_t)OK);

//    mLooper->unregisterHandler(mHandler->id());
//    mHandler.clear();

    if (mState == CONNECTING) {
        // We're still in the preparation phase, signal that it
        // failed.
        notifyPrepared(err);
    }

    mState = DISCONNECTED;
//    setError(err);

}

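// Applies a single key/value pair from the rtpParams string to the matching audio or video
// TrackInfo, creating the track entry on first use.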
status_t NuPlayer::RTPSource::setParameter(const String8 &key, const String8 &value) {
    ALOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());

    bool isAudioKey = key.contains("audio");
    TrackInfo *info = NULL;
    for (unsigned i = 0; i < mTracks.size(); ++i) {
        info = &mTracks.editItemAt(i);
        if (info != NULL && info->mIsAudio == isAudioKey) {
            ALOGV("setParameter: %s track (%d) found", isAudioKey ? "audio" : "video" , i);
            break;
        }
    }

    if (info == NULL) {
        TrackInfo newTrackInfo;
        newTrackInfo.mIsAudio = isAudioKey;
        mTracks.push(newTrackInfo);
        info = &mTracks.editTop();
        info->mJbTimeMs = kStaticJitterTimeMs;
    }

    if (key == "rtp-param-mime-type") {
        info->mMimeType = value;

        const char *mime = value.string();
        const char *delimiter = strchr(mime, '/');
        info->mCodecName = delimiter ? (delimiter + 1) : "<none>";

        ALOGV("rtp-param-mime-type: mMimeType (%s) => mCodecName (%s)",
                info->mMimeType.string(), info->mCodecName.string());
    } else if (key == "video-param-decoder-profile") {
        info->mCodecProfile = atoi(value);
    } else if (key == "video-param-decoder-level") {
        info->mCodecLevel = atoi(value);
    } else if (key == "video-param-width") {
        info->mWidth = atoi(value);
    } else if (key == "video-param-height") {
        info->mHeight = atoi(value);
    } else if (key == "rtp-param-local-ip") {
        info->mLocalIp = value;
    } else if (key == "rtp-param-local-port") {
        info->mLocalPort = atoi(value);
    } else if (key == "rtp-param-remote-ip") {
        info->mRemoteIp = value;
    } else if (key == "rtp-param-remote-port") {
        info->mRemotePort = atoi(value);
    } else if (key == "rtp-param-payload-type") {
        info->mPayloadType = atoi(value);
    } else if (key == "rtp-param-as") {
        // AS is the guaranteed bitrate negotiated from the SDP.
        info->mAS = atoi(value);
    } else if (key == "rtp-param-rtp-timeout") {
    } else if (key == "rtp-param-rtcp-timeout") {
    } else if (key == "rtp-param-time-scale") {
    } else if (key == "rtp-param-self-id") {
        info->mSelfID = atoi(value);
    } else if (key == "rtp-param-ext-cvo-extmap") {
        info->mCVOExtMap = atoi(value);
    } else if (key == "rtp-param-set-socket-network") {
        int64_t networkHandle = atoll(value);
        setSocketNetwork(networkHandle);
    } else if (key == "rtp-param-jitter-buffer-time") {
        // Clamp to a minimum of 40 ms and a maximum of 3000 ms.
        info->mJbTimeMs = std::min(std::max(40, atoi(value)), 3000);
    }

    return OK;
}

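// Splits a "key=value;key=value;..." parameter string and applies each pair via setParameter().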
status_t NuPlayer::RTPSource::setParameters(const String8 &params) {
    ALOGV("setParameters: %s", params.string());
    const char *cparams = params.string();
    const char *key_start = cparams;
    for (;;) {
        const char *equal_pos = strchr(key_start, '=');
        if (equal_pos == NULL) {
            ALOGE("Parameters %s miss a value", cparams);
            return BAD_VALUE;
        }
        String8 key(key_start, equal_pos - key_start);
        TrimString(&key);
        if (key.length() == 0) {
            ALOGE("Parameters %s contains an empty key", cparams);
            return BAD_VALUE;
        }
        const char *value_start = equal_pos + 1;
        const char *semicolon_pos = strchr(value_start, ';');
        String8 value;
        if (semicolon_pos == NULL) {
            value.setTo(value_start);
        } else {
            value.setTo(value_start, semicolon_pos - value_start);
        }
        if (setParameter(key, value) != OK) {
            return BAD_VALUE;
        }
        if (semicolon_pos == NULL) {
            break;  // Reaches the end
        }
        key_start = semicolon_pos + 1;
    }
    return OK;
}

void NuPlayer::RTPSource::setSocketNetwork(int64_t networkHandle) {
    ALOGV("setSocketNetwork: %llu", (unsigned long long)networkHandle);

    TrackInfo *info = NULL;
    for (size_t i = 0; i < mTracks.size(); ++i) {
        info = &mTracks.editItemAt(i);

        if (info == NULL)
            break;

        info->mSocketNetwork = networkHandle;
    }
}

// Trim both leading and trailing whitespace from the given string.
// static
void NuPlayer::RTPSource::TrimString(String8 *s) {
    size_t num_bytes = s->bytes();
    const char *data = s->string();

    size_t leading_space = 0;
    while (leading_space < num_bytes && isspace(data[leading_space])) {
        ++leading_space;
    }

    size_t i = num_bytes;
    while (i > leading_space && isspace(data[i - 1])) {
        --i;
    }

    s->setTo(String8(&data[leading_space], i - leading_space));
}

}  // namespace android