/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define USE_LOG SLAndroidLogLevel_Verbose

#include "sles_allinclusive.h"
#include "android/android_AudioSfDecoder.h"
#include "android/channels.h"

#include <binder/IServiceManager.h>
#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/DataSourceFactory.h>
#include <media/stagefright/InterfaceUtils.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaExtractorFactory.h>
#include <media/stagefright/SimpleDecodingSource.h>

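// Cache thresholds in bytes, used by getCacheRemaining() when the content duration
// (and therefore the average bitrate) is unknown and the cache level cannot be
// expressed as a remaining playback time.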
#define SIZE_CACHED_HIGH_BYTES 1000000
#define SIZE_CACHED_MED_BYTES   700000
#define SIZE_CACHED_LOW_BYTES   400000

namespace android {

//--------------------------------------------------------------------------------------------------
AudioSfDecoder::AudioSfDecoder(const AudioPlayback_Parameters* params) : GenericPlayer(params),
        mDataSource(0),
        mAudioSource(0),
        mAudioSourceStarted(false),
        mBitrate(-1),
        mDurationUsec(ANDROID_UNKNOWN_TIME),
        mDecodeBuffer(NULL),
        mSeekTimeMsec(0),
        // play event logic depends on the initial time being zero not ANDROID_UNKNOWN_TIME
        mLastDecodedPositionUs(0)
{
    SL_LOGD("AudioSfDecoder::AudioSfDecoder()");
}


AudioSfDecoder::~AudioSfDecoder() {
    SL_LOGD("AudioSfDecoder::~AudioSfDecoder()");
}


void AudioSfDecoder::preDestroy() {
    GenericPlayer::preDestroy();
    SL_LOGD("AudioSfDecoder::preDestroy()");
    {
        Mutex::Autolock _l(mBufferSourceLock);

        if (NULL != mDecodeBuffer) {
            mDecodeBuffer->release();
            mDecodeBuffer = NULL;
        }

        if ((mAudioSource != 0) && mAudioSourceStarted) {
            mAudioSource->stop();
            mAudioSourceStarted = false;
        }
    }
}


//--------------------------------------------------
void AudioSfDecoder::play() {
    SL_LOGD("AudioSfDecoder::play");

    GenericPlayer::play();
    (new AMessage(kWhatDecode, this))->post();
}


void AudioSfDecoder::getPositionMsec(int* msec) {
    int64_t timeUsec = getPositionUsec();
    if (timeUsec == ANDROID_UNKNOWN_TIME) {
        *msec = ANDROID_UNKNOWN_TIME;
    } else {
        *msec = timeUsec / 1000;
    }
}


//--------------------------------------------------
uint32_t AudioSfDecoder::getPcmFormatKeyCount() const {
    return NB_PCMMETADATA_KEYS;
}


//--------------------------------------------------
bool AudioSfDecoder::getPcmFormatKeySize(uint32_t index, uint32_t* pKeySize) {
    if (index >= NB_PCMMETADATA_KEYS) {
        return false;
    } else {
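        // the reported size includes the terminating NUL of the key string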
        *pKeySize = strlen(kPcmDecodeMetadataKeys[index]) + 1;
        return true;
    }
}


//--------------------------------------------------
bool AudioSfDecoder::getPcmFormatKeyName(uint32_t index, uint32_t keySize, char* keyName) {
    uint32_t actualKeySize;
    if (!getPcmFormatKeySize(index, &actualKeySize)) {
        return false;
    }
    if (keySize < actualKeySize) {
        return false;
    }
    strncpy(keyName, kPcmDecodeMetadataKeys[index], actualKeySize);
    return true;
}


//--------------------------------------------------
bool AudioSfDecoder::getPcmFormatValueSize(uint32_t index, uint32_t* pValueSize) {
    if (index >= NB_PCMMETADATA_KEYS) {
        *pValueSize = 0;
        return false;
    } else {
        *pValueSize = sizeof(uint32_t);
        return true;
    }
}


//--------------------------------------------------
bool AudioSfDecoder::getPcmFormatKeyValue(uint32_t index, uint32_t size, uint32_t* pValue) {
    uint32_t valueSize = 0;
    if (!getPcmFormatValueSize(index, &valueSize)) {
        return false;
    } else if (size != valueSize) {
        // this ensures we are accessing mPcmFormatValues with a valid size for that index
        SL_LOGE("Error retrieving metadata value at index %d: using size of %d, should be %d",
                index, size, valueSize);
        return false;
    } else {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        *pValue = mPcmFormatValues[index];
        return true;
    }
}


//--------------------------------------------------
// Event handlers
//  it is strictly verboten to call those methods outside of the event loop

// Initializes the data and audio sources, and updates the PCM format info
// post-condition: upon successful initialization based on the player data locator
//    GenericPlayer::onPrepare() was called
//    mDataSource != 0
//    mAudioSource != 0
//    mAudioSourceStarted == true
// All error returns from this method are via notifyPrepared(status) followed by "return".
void AudioSfDecoder::onPrepare() {
    SL_LOGD("AudioSfDecoder::onPrepare()");
    Mutex::Autolock _l(mBufferSourceLock);

    {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        // Initialize the PCM format info with the known parameters before the start of the decode
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_BITSPERSAMPLE] = SL_PCMSAMPLEFORMAT_FIXED_16;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CONTAINERSIZE] = 16;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_ENDIANNESS] = SL_BYTEORDER_LITTLEENDIAN;
        // initialization with the default values: they will be replaced by the actual values
        //   once the decoder has figured them out
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = UNKNOWN_NUMCHANNELS;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = UNKNOWN_SAMPLERATE;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = SL_ANDROID_UNKNOWN_CHANNELMASK;
    }

    //---------------------------------
    // Instantiate and initialize the data source for the decoder
    sp<DataSource> dataSource;

    switch (mDataLocatorType) {

    case kDataLocatorNone:
        SL_LOGE("AudioSfDecoder::onPrepare: no data locator set");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;

    case kDataLocatorUri:
        dataSource = DataSourceFactory::CreateFromURI(
                NULL /* XXX httpService */, mDataLocator.uriRef);
        if (dataSource == NULL) {
            SL_LOGE("AudioSfDecoder::onPrepare(): Error opening %s", mDataLocator.uriRef);
            notifyPrepared(MEDIA_ERROR_BASE);
            return;
        }
        break;

    case kDataLocatorFd:
    {
        // As FileSource unconditionally takes ownership of the fd and closes it, we have to
        // dup the fd for FileSource if the app wants to keep ownership itself
        int fd = mDataLocator.fdi.fd;
        if (mDataLocator.fdi.mCloseAfterUse) {
            mDataLocator.fdi.mCloseAfterUse = false;
        } else {
            fd = ::dup(fd);
        }
        dataSource = new FileSource(fd, mDataLocator.fdi.offset, mDataLocator.fdi.length);
        status_t err = dataSource->initCheck();
        if (err != OK) {
            notifyPrepared(err);
            return;
        }
        break;
    }

    // AndroidBufferQueue data source is handled by a subclass,
    // which does not call up to this method. Hence, the missing case.
    default:
        TRESPASS();
    }

    //---------------------------------
    // Instantiate and initialize the decoder attached to the data source
    sp<IMediaExtractor> extractor = MediaExtractorFactory::Create(dataSource);
    if (extractor == NULL) {
        SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate extractor.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    ssize_t audioTrackIndex = -1;
    bool isRawAudio = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp("audio/", mime, 6)) {
            if (isSupportedCodec(mime)) {
                audioTrackIndex = i;

                if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
                    isRawAudio = true;
                }
                break;
            }
        }
    }

    if (audioTrackIndex < 0) {
        SL_LOGE("AudioSfDecoder::onPrepare: Could not find a supported audio track.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    sp<MediaSource> source = CreateMediaSourceFromIMediaSource(
            extractor->getTrack(audioTrackIndex));
    sp<MetaData> meta = source->getFormat();

    // we can't trust the OMXCodec (if there is one) to issue an INFO_FORMAT_CHANGED, so we want
    // to have some meaningful values as soon as possible.
    int32_t channelCount;
    bool hasChannelCount = meta->findInt32(kKeyChannelCount, &channelCount);
    int32_t sr;
    bool hasSampleRate = meta->findInt32(kKeySampleRate, &sr);

    // first compute the duration
    off64_t size;
    int64_t durationUs;
    int32_t durationMsec;
    if (dataSource->getSize(&size) == OK
            && meta->findInt64(kKeyDuration, &durationUs)) {
        if (durationUs != 0) {
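            // average bitrate: (size bytes * 8 bits/byte) / (durationUs / 1e6 sec)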
            mBitrate = size * 8000000LL / durationUs;  // in bits/sec
        } else {
            mBitrate = -1;
        }
        mDurationUsec = durationUs;
        durationMsec = durationUs / 1000;
    } else {
        mBitrate = -1;
        mDurationUsec = ANDROID_UNKNOWN_TIME;
        durationMsec = ANDROID_UNKNOWN_TIME;
    }

    // then assign the duration under the settings lock
    {
        Mutex::Autolock _l(mSettingsLock);
        mDurationMsec = durationMsec;
    }

    // the audio content is not raw PCM, so we need a decoder
    if (!isRawAudio) {
        source = SimpleDecodingSource::Create(source);
        if (source == NULL) {
            SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate decoder.");
            notifyPrepared(ERROR_UNSUPPORTED);
            return;
        }

        meta = source->getFormat();
    }


    if (source->start() != OK) {
        SL_LOGE("AudioSfDecoder::onPrepare: Failed to start source/decoder.");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;
    }

    //---------------------------------
    // The data source, and audio source (a decoder if required) are ready to be used
    mDataSource = dataSource;
    mAudioSource = source;
    mAudioSourceStarted = true;

    if (!hasChannelCount) {
        CHECK(meta->findInt32(kKeyChannelCount, &channelCount));
    }

    if (!hasSampleRate) {
        CHECK(meta->findInt32(kKeySampleRate, &sr));
    }
    // FIXME add code below once channel mask support is in, currently initialized to default
    //       value computed from the channel count
    //    if (!hasChannelMask) {
    //        CHECK(meta->findInt32(kKeyChannelMask, &channelMask));
    //    }

    if (!wantPrefetch()) {
        SL_LOGV("AudioSfDecoder::onPrepare: no need to prefetch");
        // doesn't need prefetching, notify good to go
        mCacheStatus = kStatusHigh;
        mCacheFill = 1000;
        notifyStatus();
        notifyCacheFill();
    }

    {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = sr;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = channelCount;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] =
                sles_channel_out_mask_from_count(channelCount);
    }

    // at this point we have enough information about the source to create the sink that
    // will consume the data
    createAudioSink();

    // signal successful completion of prepare
    mStateFlags |= kFlagPrepared;

    GenericPlayer::onPrepare();
    SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags);
}


void AudioSfDecoder::onPause() {
    SL_LOGV("AudioSfDecoder::onPause()");
    GenericPlayer::onPause();
    pauseAudioSink();
}


void AudioSfDecoder::onPlay() {
    SL_LOGV("AudioSfDecoder::onPlay()");
    GenericPlayer::onPlay();
    startAudioSink();
}


void AudioSfDecoder::onSeek(const sp<AMessage> &msg) {
    SL_LOGV("AudioSfDecoder::onSeek");
    int64_t timeMsec;
    CHECK(msg->findInt64(WHATPARAM_SEEK_SEEKTIME_MS, &timeMsec));

    Mutex::Autolock _l(mTimeLock);
    mStateFlags |= kFlagSeeking;
    mSeekTimeMsec = timeMsec;
    // don't set mLastDecodedPositionUs to ANDROID_UNKNOWN_TIME; getPositionUsec
    // ignores mLastDecodedPositionUs while seeking, and substitutes the seek goal instead

    // nop for now
    GenericPlayer::onSeek(msg);
}


void AudioSfDecoder::onLoop(const sp<AMessage> &msg) {
    SL_LOGV("AudioSfDecoder::onLoop");
    int32_t loop;
    CHECK(msg->findInt32(WHATPARAM_LOOP_LOOPING, &loop));

    if (loop) {
        //SL_LOGV("AudioSfDecoder::onLoop start looping");
        mStateFlags |= kFlagLooping;
    } else {
        //SL_LOGV("AudioSfDecoder::onLoop stop looping");
        mStateFlags &= ~kFlagLooping;
    }

    // nop for now
    GenericPlayer::onLoop(msg);
}


void AudioSfDecoder::onCheckCache(const sp<AMessage> &msg) {
    //SL_LOGV("AudioSfDecoder::onCheckCache");
    bool eos;
    CacheStatus_t status = getCacheRemaining(&eos);

    if (eos || status == kStatusHigh
            || ((mStateFlags & kFlagPreparing) && (status >= kStatusEnough))) {
        if (mStateFlags & kFlagPlaying) {
            startAudioSink();
        }
        mStateFlags &= ~kFlagBuffering;

        SL_LOGV("AudioSfDecoder::onCheckCache: buffering done.");

        if (mStateFlags & kFlagPreparing) {
            //SL_LOGV("AudioSfDecoder::onCheckCache: preparation done.");
            mStateFlags &= ~kFlagPreparing;
        }

        if (mStateFlags & kFlagPlaying) {
            (new AMessage(kWhatDecode, this))->post();
        }
        return;
    }

    msg->post(100000);
}


void AudioSfDecoder::onDecode() {
    SL_LOGV("AudioSfDecoder::onDecode");

    //-------------------------------- Need to buffer some more before decoding?
    bool eos;
    if (mDataSource == 0) {
        // application set play state to paused which failed, then set play state to playing
        return;
    }

    if (wantPrefetch()
            && (getCacheRemaining(&eos) == kStatusLow)
            && !eos) {
        SL_LOGV("buffering more.");

        if (mStateFlags & kFlagPlaying) {
            pauseAudioSink();
        }
        mStateFlags |= kFlagBuffering;
        (new AMessage(kWhatCheckCache, this))->post(100000);
        return;
    }

    if (!(mStateFlags & (kFlagPlaying | kFlagBuffering | kFlagPreparing))) {
        // don't decode if we're not buffering, prefetching or playing
        //SL_LOGV("don't decode: not buffering, prefetching or playing");
        return;
    }

    //-------------------------------- Decode
    status_t err;
    MediaSource::ReadOptions readOptions;
    if (mStateFlags & kFlagSeeking) {
        assert(mSeekTimeMsec != ANDROID_UNKNOWN_TIME);
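        // ReadOptions::setSeekTo() expects microseconds, so convert the millisecond seek target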
        readOptions.setSeekTo(mSeekTimeMsec * 1000);
    }

    int64_t timeUsec = ANDROID_UNKNOWN_TIME;
    {
        Mutex::Autolock _l(mBufferSourceLock);

        if (NULL != mDecodeBuffer) {
            // the current decoded buffer hasn't been rendered, drop it
            mDecodeBuffer->release();
            mDecodeBuffer = NULL;
        }
        if (!mAudioSourceStarted) {
            return;
        }
        err = mAudioSource->read(&mDecodeBuffer, &readOptions);
        if (err == OK) {
            // FIXME workaround apparent bug in AAC decoder: kKeyTime is 3 frames old if length is 0
            if (mDecodeBuffer->range_length() == 0) {
                timeUsec = ANDROID_UNKNOWN_TIME;
            } else {
                CHECK(mDecodeBuffer->meta_data().findInt64(kKeyTime, &timeUsec));
            }
        } else {
            // errors are handled below
        }
    }

    {
        Mutex::Autolock _l(mTimeLock);
        if (mStateFlags & kFlagSeeking) {
            mStateFlags &= ~kFlagSeeking;
            mSeekTimeMsec = ANDROID_UNKNOWN_TIME;
        }
        if (timeUsec != ANDROID_UNKNOWN_TIME) {
            // Note that though we've decoded this position, we haven't rendered it yet.
            // So a GetPosition called after this point will observe the advanced position,
            // even though the PCM may not have been supplied to the sink.  That's OK as
            // we don't claim to provide AAC frame-accurate (let alone sample-accurate) GetPosition.
            mLastDecodedPositionUs = timeUsec;
        }
    }

    //-------------------------------- Handle return of decode
    if (err != OK) {
        bool continueDecoding = false;
        switch (err) {
            case ERROR_END_OF_STREAM:
                if (0 < mDurationUsec) {
                    Mutex::Autolock _l(mTimeLock);
                    mLastDecodedPositionUs = mDurationUsec;
                }
                // handle notification and looping at end of stream
                if (mStateFlags & kFlagPlaying) {
                    notify(PLAYEREVENT_ENDOFSTREAM, 1, true /*async*/);
                }
                if (mStateFlags & kFlagLooping) {
                    seek(0);
                    // kick-off decoding again
                    continueDecoding = true;
                }
                break;
            case INFO_FORMAT_CHANGED:
                SL_LOGD("MediaSource::read encountered INFO_FORMAT_CHANGED");
                // reconfigure output
                {
                    Mutex::Autolock _l(mBufferSourceLock);
                    hasNewDecodeParams();
                }
                continueDecoding = true;
                break;
            case INFO_DISCONTINUITY:
                SL_LOGD("MediaSource::read encountered INFO_DISCONTINUITY");
                continueDecoding = true;
                break;
            default:
                SL_LOGE("MediaSource::read returned error %d", err);
                break;
        }
        if (continueDecoding) {
            if (NULL == mDecodeBuffer) {
                (new AMessage(kWhatDecode, this))->post();
                return;
            }
        } else {
            return;
        }
    }

    //-------------------------------- Render
    sp<AMessage> msg = new AMessage(kWhatRender, this);
    msg->post();

}


void AudioSfDecoder::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDecode:
            onDecode();
            break;

        case kWhatRender:
            onRender();
            break;

        case kWhatCheckCache:
            onCheckCache(msg);
            break;

        default:
            GenericPlayer::onMessageReceived(msg);
            break;
    }
}

//--------------------------------------------------
// Prepared state, prefetch status notifications
void AudioSfDecoder::notifyPrepared(status_t prepareRes) {
    assert(!(mStateFlags & (kFlagPrepared | kFlagPreparedUnsuccessfully)));
    if (NO_ERROR == prepareRes) {
        // The "then" fork is not currently used, but is kept here to make it easier
        // to replace by a new signalPrepareCompletion(status) if we re-visit this later.
        mStateFlags |= kFlagPrepared;
    } else {
        mStateFlags |= kFlagPreparedUnsuccessfully;
    }
    // Do not call the superclass onPrepare to notify, because it uses a default error
    // status code but we can provide a more specific one.
    // GenericPlayer::onPrepare();
    notify(PLAYEREVENT_PREPARED, (int32_t)prepareRes, true /*async*/);
    SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags);
}


void AudioSfDecoder::onNotify(const sp<AMessage> &msg) {
    notif_cbf_t notifyClient;
    void* notifyUser;
    {
        android::Mutex::Autolock autoLock(mNotifyClientLock);
        if (NULL == mNotifyClient) {
            return;
        } else {
            notifyClient = mNotifyClient;
            notifyUser = mNotifyUser;
        }
    }
    int32_t val;
    if (msg->findInt32(PLAYEREVENT_PREFETCHSTATUSCHANGE, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHSTATUSCHANGE, val);
        notifyClient(kEventPrefetchStatusChange, val, 0, notifyUser);
    }
    else if (msg->findInt32(PLAYEREVENT_PREFETCHFILLLEVELUPDATE, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHFILLLEVELUPDATE, val);
        notifyClient(kEventPrefetchFillLevelUpdate, val, 0, notifyUser);
    }
    else if (msg->findInt32(PLAYEREVENT_ENDOFSTREAM, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_ENDOFSTREAM, val);
        notifyClient(kEventEndOfStream, val, 0, notifyUser);
    }
    else {
        GenericPlayer::onNotify(msg);
    }
}


//--------------------------------------------------
// Private utility functions

bool AudioSfDecoder::wantPrefetch() {
    if (mDataSource != 0) {
        return (mDataSource->flags() & DataSource::kWantsPrefetching);
    } else {
        // happens if an improper data locator was passed, if the media extractor couldn't be
        //  initialized, if there is no audio track in the media, if the OMX decoder couldn't be
        //  instantiated, if the source couldn't be opened, or if the MediaSource
        //  couldn't be started
        SL_LOGV("AudioSfDecoder::wantPrefetch() tries to access NULL mDataSource");
        return false;
    }
}


int64_t AudioSfDecoder::getPositionUsec() {
    Mutex::Autolock _l(mTimeLock);
    if (mStateFlags & kFlagSeeking) {
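        // while a seek is pending, report the seek target (converted to microseconds) as the position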
        return mSeekTimeMsec * 1000;
    } else {
        return mLastDecodedPositionUs;
    }
}


CacheStatus_t AudioSfDecoder::getCacheRemaining(bool *eos) {
    sp<NuCachedSource2> cachedSource =
            static_cast<NuCachedSource2 *>(mDataSource.get());

    CacheStatus_t oldStatus = mCacheStatus;

    status_t finalStatus;
    size_t dataRemaining = cachedSource->approxDataRemaining(&finalStatus);
    *eos = (finalStatus != OK);

    CHECK_GE(mBitrate, 0);

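    // estimate the remaining playback time from the remaining bytes and the average bitrate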
    int64_t dataRemainingUs = dataRemaining * 8000000LL / mBitrate;
    //SL_LOGV("AudioSfDecoder::getCacheRemaining: approx %.2f secs remaining (eos=%d)",
    //       dataRemainingUs / 1E6, *eos);

    if (*eos) {
        // data is buffered up to the end of the stream, it can't get any better than this
        mCacheStatus = kStatusHigh;
        mCacheFill = 1000;

    } else {
        if (mDurationUsec > 0) {
            // known duration:

            // fill level is ratio of how much has been played + how much is
            //  cached, divided by total duration
            int64_t currentPositionUsec = getPositionUsec();
            if (currentPositionUsec == ANDROID_UNKNOWN_TIME) {
                // if we don't know where we are, assume the worst for the fill ratio
                currentPositionUsec = 0;
            }
            if (mDurationUsec > 0) {
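                // mCacheFill is expressed in permille (0 to 1000) of the total duration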
                mCacheFill = (int16_t) ((1000.0
                        * (double)(currentPositionUsec + dataRemainingUs) / mDurationUsec));
            } else {
                mCacheFill = 0;
            }
            //SL_LOGV("cacheFill = %d", mCacheFill);

            // cache status is evaluated against duration thresholds
            if (dataRemainingUs > DURATION_CACHED_HIGH_MS*1000) {
                mCacheStatus = kStatusHigh;
                //ALOGV("high");
            } else if (dataRemainingUs > DURATION_CACHED_MED_MS*1000) {
                //ALOGV("enough");
                mCacheStatus = kStatusEnough;
            } else if (dataRemainingUs < DURATION_CACHED_LOW_MS*1000) {
                //ALOGV("low");
                mCacheStatus = kStatusLow;
            } else {
                mCacheStatus = kStatusIntermediate;
            }

        } else {
            // unknown duration:

            // cache status is evaluated against cache amount thresholds
            // (no duration so we don't have the bitrate either, could be derived from format?)
            if (dataRemaining > SIZE_CACHED_HIGH_BYTES) {
                mCacheStatus = kStatusHigh;
            } else if (dataRemaining > SIZE_CACHED_MED_BYTES) {
                mCacheStatus = kStatusEnough;
            } else if (dataRemaining < SIZE_CACHED_LOW_BYTES) {
                mCacheStatus = kStatusLow;
            } else {
                mCacheStatus = kStatusIntermediate;
            }
        }

    }

    if (oldStatus != mCacheStatus) {
        notifyStatus();
    }

    if (abs(mCacheFill - mLastNotifiedCacheFill) > mCacheFillNotifThreshold) {
        notifyCacheFill();
    }

    return mCacheStatus;
}


void AudioSfDecoder::hasNewDecodeParams() {

    if ((mAudioSource != 0) && mAudioSourceStarted) {
        sp<MetaData> meta = mAudioSource->getFormat();

        int32_t channelCount;
        CHECK(meta->findInt32(kKeyChannelCount, &channelCount));
        int32_t sr;
        CHECK(meta->findInt32(kKeySampleRate, &sr));

        // FIXME similar to onPrepare()
        {
            android::Mutex::Autolock autoLock(mPcmFormatLock);
            SL_LOGV("format changed: old sr=%d, channels=%d; new sr=%d, channels=%d",
                    mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE],
                    mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS],
                    sr, channelCount);
            mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = channelCount;
            mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = sr;
            mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] =
                    sles_channel_out_mask_from_count(channelCount);
        }
        // there's no need to do a notify of PLAYEREVENT_CHANNEL_COUNT,
        // because the only listener is for volume updates, and decoders don't support that
    }

    // alert users of those params
    updateAudioSink();
}

static const char* const kPlaybackOnlyCodecs[] = { MEDIA_MIMETYPE_AUDIO_AMR_NB,
        MEDIA_MIMETYPE_AUDIO_AMR_WB };
#define NB_PLAYBACK_ONLY_CODECS (sizeof(kPlaybackOnlyCodecs)/sizeof(kPlaybackOnlyCodecs[0]))

bool AudioSfDecoder::isSupportedCodec(const char* mime) {
    bool codecRequiresPermission = false;
    for (unsigned int i = 0 ; i < NB_PLAYBACK_ONLY_CODECS ; i++) {
        if (!strcasecmp(mime, kPlaybackOnlyCodecs[i])) {
            codecRequiresPermission = true;
            break;
        }
    }
    if (codecRequiresPermission) {
        // verify only the system can decode, for playback only
        return checkCallingPermission(
                String16("android.permission.ALLOW_ANY_CODEC_FOR_PLAYBACK"));
    } else {
        return true;
    }
}

} // namespace android