/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define USE_LOG SLAndroidLogLevel_Verbose

#include "sles_allinclusive.h"
#include "android/android_AudioSfDecoder.h"

#include <binder/IServiceManager.h>
#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ADebug.h>


#define SIZE_CACHED_HIGH_BYTES 1000000
#define SIZE_CACHED_MED_BYTES   700000
#define SIZE_CACHED_LOW_BYTES   400000
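// byte thresholds used by getCacheRemaining() to classify the cache status when the stream
// duration (and therefore the bitrate) is unknown; when the duration is known, the
// DURATION_CACHED_*_MS time thresholds are used instead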

namespace android {

//--------------------------------------------------------------------------------------------------
AudioSfDecoder::AudioSfDecoder(const AudioPlayback_Parameters* params) : GenericPlayer(params),
        mDataSource(0),
        mAudioSource(0),
        mAudioSourceStarted(false),
        mBitrate(-1),
        mDurationUsec(ANDROID_UNKNOWN_TIME),
        mDecodeBuffer(NULL),
        mSeekTimeMsec(0),
        // play event logic depends on the initial time being zero not ANDROID_UNKNOWN_TIME
        mLastDecodedPositionUs(0)
{
    SL_LOGD("AudioSfDecoder::AudioSfDecoder()");
}


AudioSfDecoder::~AudioSfDecoder() {
    SL_LOGD("AudioSfDecoder::~AudioSfDecoder()");
}


void AudioSfDecoder::preDestroy() {
    GenericPlayer::preDestroy();
    SL_LOGD("AudioSfDecoder::preDestroy()");
    {
        Mutex::Autolock _l(mBufferSourceLock);

        if (NULL != mDecodeBuffer) {
            mDecodeBuffer->release();
            mDecodeBuffer = NULL;
        }

        if ((mAudioSource != 0) && mAudioSourceStarted) {
            mAudioSource->stop();
            mAudioSourceStarted = false;
        }
    }
}


//--------------------------------------------------
void AudioSfDecoder::play() {
    SL_LOGD("AudioSfDecoder::play");

    GenericPlayer::play();
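    // decoding is driven by kWhatDecode messages (see onMessageReceived() and onDecode());
    // post the first one here to start the decode loop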
    (new AMessage(kWhatDecode, id()))->post();
}


void AudioSfDecoder::getPositionMsec(int* msec) {
    int64_t timeUsec = getPositionUsec();
    if (timeUsec == ANDROID_UNKNOWN_TIME) {
        *msec = ANDROID_UNKNOWN_TIME;
    } else {
        *msec = timeUsec / 1000;
    }
}


//--------------------------------------------------
uint32_t AudioSfDecoder::getPcmFormatKeyCount() const {
    return NB_PCMMETADATA_KEYS;
}


//--------------------------------------------------
bool AudioSfDecoder::getPcmFormatKeySize(uint32_t index, uint32_t* pKeySize) {
    if (index >= NB_PCMMETADATA_KEYS) {
        return false;
    } else {
        *pKeySize = strlen(kPcmDecodeMetadataKeys[index]) + 1;
        return true;
    }
}


//--------------------------------------------------
bool AudioSfDecoder::getPcmFormatKeyName(uint32_t index, uint32_t keySize, char* keyName) {
    uint32_t actualKeySize;
    if (!getPcmFormatKeySize(index, &actualKeySize)) {
        return false;
    }
    if (keySize < actualKeySize) {
        return false;
    }
    strncpy(keyName, kPcmDecodeMetadataKeys[index], actualKeySize);
    return true;
}


//--------------------------------------------------
bool AudioSfDecoder::getPcmFormatValueSize(uint32_t index, uint32_t* pValueSize) {
    if (index >= NB_PCMMETADATA_KEYS) {
        *pValueSize = 0;
        return false;
    } else {
        *pValueSize = sizeof(uint32_t);
        return true;
    }
}


//--------------------------------------------------
bool AudioSfDecoder::getPcmFormatKeyValue(uint32_t index, uint32_t size, uint32_t* pValue) {
    uint32_t valueSize = 0;
    if (!getPcmFormatValueSize(index, &valueSize)) {
        return false;
    } else if (size != valueSize) {
        // this ensures we are accessing mPcmFormatValues with a valid size for that index
        SL_LOGE("Error retrieving metadata value at index %d: using size of %d, should be %d",
                index, size, valueSize);
        return false;
    } else {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        *pValue = mPcmFormatValues[index];
        return true;
    }
}
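
// Illustrative caller-side sketch (not part of the original code; 'decoder' and the buffer
// handling are hypothetical): the four accessors above are meant to be used together to walk
// the decoded-PCM format metadata. Note that getPcmFormatKeyValue() requires exactly the value
// size reported by getPcmFormatValueSize().
//
//     AudioSfDecoder *decoder = /* obtained elsewhere */;
//     for (uint32_t i = 0; i < decoder->getPcmFormatKeyCount(); i++) {
//         uint32_t keySize, valueSize, value;
//         if (decoder->getPcmFormatKeySize(i, &keySize)
//                 && decoder->getPcmFormatValueSize(i, &valueSize)) {
//             char keyName[64];   // assumed large enough for the kPcmDecodeMetadataKeys strings
//             if (keySize <= sizeof(keyName)
//                     && decoder->getPcmFormatKeyName(i, keySize, keyName)
//                     && decoder->getPcmFormatKeyValue(i, valueSize, &value)) {
//                 SL_LOGV("PCM format: %s = %u", keyName, value);
//             }
//         }
//     }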


//--------------------------------------------------
// Event handlers
//  it is strictly verboten to call these methods outside of the event loop

// Initializes the data and audio sources, and updates the PCM format info
// post-condition: upon successful initialization based on the player data locator
//    GenericPlayer::onPrepare() was called
//    mDataSource != 0
//    mAudioSource != 0
//    mAudioSourceStarted == true
// All error returns from this method are via notifyPrepared(status) followed by "return".
void AudioSfDecoder::onPrepare() {
    SL_LOGD("AudioSfDecoder::onPrepare()");
    Mutex::Autolock _l(mBufferSourceLock);

    {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        // Initialize the PCM format info with the known parameters before the start of the decode
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_BITSPERSAMPLE] = SL_PCMSAMPLEFORMAT_FIXED_16;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CONTAINERSIZE] = 16;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_ENDIANNESS] = SL_BYTEORDER_LITTLEENDIAN;
        // initialization with the default values: they will be replaced by the actual values
        // once the decoder has figured them out
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = UNKNOWN_NUMCHANNELS;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = UNKNOWN_SAMPLERATE;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = UNKNOWN_CHANNELMASK;
    }

    //---------------------------------
    // Instantiate and initialize the data source for the decoder
    sp<DataSource> dataSource;

    switch (mDataLocatorType) {

    case kDataLocatorNone:
        SL_LOGE("AudioSfDecoder::onPrepare: no data locator set");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;

    case kDataLocatorUri:
        dataSource = DataSource::CreateFromURI(
                NULL /* XXX httpService */, mDataLocator.uriRef);
        if (dataSource == NULL) {
            SL_LOGE("AudioSfDecoder::onPrepare(): Error opening %s", mDataLocator.uriRef);
            notifyPrepared(MEDIA_ERROR_BASE);
            return;
        }
        break;

    case kDataLocatorFd:
    {
        // Because FileSource unconditionally takes ownership of the fd and closes it,
        // we have to dup the fd if the app wants to keep ownership itself
        int fd = mDataLocator.fdi.fd;
        if (mDataLocator.fdi.mCloseAfterUse) {
            mDataLocator.fdi.mCloseAfterUse = false;
        } else {
            fd = ::dup(fd);
        }
        dataSource = new FileSource(fd, mDataLocator.fdi.offset, mDataLocator.fdi.length);
        status_t err = dataSource->initCheck();
        if (err != OK) {
            notifyPrepared(err);
            return;
        }
        break;
    }

    // AndroidBufferQueue data source is handled by a subclass,
    // which does not call up to this method. Hence, the missing case.
    default:
        TRESPASS();
    }

    //---------------------------------
    // Instantiate and initialize the decoder attached to the data source
    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
    if (extractor == NULL) {
        SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate extractor.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    ssize_t audioTrackIndex = -1;
    bool isRawAudio = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<MetaData> meta = extractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp("audio/", mime, 6)) {
            if (isSupportedCodec(mime)) {
                audioTrackIndex = i;

                if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
                    isRawAudio = true;
                }
                break;
            }
        }
    }

    if (audioTrackIndex < 0) {
        SL_LOGE("AudioSfDecoder::onPrepare: Could not find a supported audio track.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    sp<MediaSource> source = extractor->getTrack(audioTrackIndex);
    sp<MetaData> meta = source->getFormat();

    // we can't trust the OMXCodec (if there is one) to issue an INFO_FORMAT_CHANGED, so we want
    // to have some meaningful values as soon as possible.
    int32_t channelCount;
    bool hasChannelCount = meta->findInt32(kKeyChannelCount, &channelCount);
    int32_t sr;
    bool hasSampleRate = meta->findInt32(kKeySampleRate, &sr);

    // first compute the duration
    off64_t size;
    int64_t durationUs;
    int32_t durationMsec;
    if (dataSource->getSize(&size) == OK
            && meta->findInt64(kKeyDuration, &durationUs)) {
        if (durationUs != 0) {
            mBitrate = size * 8000000ll / durationUs; // in bits/sec
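            // i.e. size bytes * 8 bits/byte * 1,000,000 us/s, divided by the duration in us;
            // for example, a 4,000,000 byte stream lasting 250 s works out to 128000 bits/s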
        } else {
            mBitrate = -1;
        }
        mDurationUsec = durationUs;
        durationMsec = durationUs / 1000;
    } else {
        mBitrate = -1;
        mDurationUsec = ANDROID_UNKNOWN_TIME;
        durationMsec = ANDROID_UNKNOWN_TIME;
    }

    // then assign the duration under the settings lock
    {
        Mutex::Autolock _l(mSettingsLock);
        mDurationMsec = durationMsec;
    }

    // if the audio content is not raw PCM, we need a decoder
    if (!isRawAudio) {
        OMXClient client;
        CHECK_EQ(client.connect(), (status_t)OK);

        source = OMXCodec::Create(
                client.interface(), meta, false /* createEncoder */,
                source);

        if (source == NULL) {
            SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate decoder.");
            notifyPrepared(ERROR_UNSUPPORTED);
            return;
        }

        meta = source->getFormat();
    }


    if (source->start() != OK) {
        SL_LOGE("AudioSfDecoder::onPrepare: Failed to start source/decoder.");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;
    }

    //---------------------------------
    // The data source and audio source (a decoder if required) are ready to be used
    mDataSource = dataSource;
    mAudioSource = source;
    mAudioSourceStarted = true;

    if (!hasChannelCount) {
        CHECK(meta->findInt32(kKeyChannelCount, &channelCount));
    }

    if (!hasSampleRate) {
        CHECK(meta->findInt32(kKeySampleRate, &sr));
    }
    // FIXME add code below once channel mask support is in, currently initialized to default
    //       value computed from the channel count
    // if (!hasChannelMask) {
    //     CHECK(meta->findInt32(kKeyChannelMask, &channelMask));
    // }

    if (!wantPrefetch()) {
        SL_LOGV("AudioSfDecoder::onPrepare: no need to prefetch");
        // doesn't need prefetching, notify good to go
        mCacheStatus = kStatusHigh;
        mCacheFill = 1000;
        notifyStatus();
        notifyCacheFill();
    }

    {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = sr;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = channelCount;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] =
                channelCountToMask(channelCount);
    }

    // at this point we have enough information about the source to create the sink that
    // will consume the data
    createAudioSink();

    // signal successful completion of prepare
    mStateFlags |= kFlagPrepared;

    GenericPlayer::onPrepare();
    SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags);
}


void AudioSfDecoder::onPause() {
    SL_LOGV("AudioSfDecoder::onPause()");
    GenericPlayer::onPause();
    pauseAudioSink();
}


void AudioSfDecoder::onPlay() {
    SL_LOGV("AudioSfDecoder::onPlay()");
    GenericPlayer::onPlay();
    startAudioSink();
}


void AudioSfDecoder::onSeek(const sp<AMessage> &msg) {
    SL_LOGV("AudioSfDecoder::onSeek");
    int64_t timeMsec;
    CHECK(msg->findInt64(WHATPARAM_SEEK_SEEKTIME_MS, &timeMsec));

    Mutex::Autolock _l(mTimeLock);
    mStateFlags |= kFlagSeeking;
    mSeekTimeMsec = timeMsec;
    // don't set mLastDecodedPositionUs to ANDROID_UNKNOWN_TIME; getPositionUsec
    // ignores mLastDecodedPositionUs while seeking, and substitutes the seek goal instead

    // nop for now
    GenericPlayer::onSeek(msg);
}


void AudioSfDecoder::onLoop(const sp<AMessage> &msg) {
    SL_LOGV("AudioSfDecoder::onLoop");
    int32_t loop;
    CHECK(msg->findInt32(WHATPARAM_LOOP_LOOPING, &loop));

    if (loop) {
        //SL_LOGV("AudioSfDecoder::onLoop start looping");
        mStateFlags |= kFlagLooping;
    } else {
        //SL_LOGV("AudioSfDecoder::onLoop stop looping");
        mStateFlags &= ~kFlagLooping;
    }

    // nop for now
    GenericPlayer::onLoop(msg);
}


void AudioSfDecoder::onCheckCache(const sp<AMessage> &msg) {
    //SL_LOGV("AudioSfDecoder::onCheckCache");
    bool eos;
    CacheStatus_t status = getCacheRemaining(&eos);

    if (eos || status == kStatusHigh
            || ((mStateFlags & kFlagPreparing) && (status >= kStatusEnough))) {
        if (mStateFlags & kFlagPlaying) {
            startAudioSink();
        }
        mStateFlags &= ~kFlagBuffering;

        SL_LOGV("AudioSfDecoder::onCheckCache: buffering done.");

        if (mStateFlags & kFlagPreparing) {
            //SL_LOGV("AudioSfDecoder::onCheckCache: preparation done.");
            mStateFlags &= ~kFlagPreparing;
        }

        if (mStateFlags & kFlagPlaying) {
            (new AMessage(kWhatDecode, id()))->post();
        }
        return;
    }

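    // not enough data cached yet: re-post this kWhatCheckCache message to check again in 100 ms
    // (AMessage::post() takes its delay in microseconds)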
    msg->post(100000);
}


void AudioSfDecoder::onDecode() {
    SL_LOGV("AudioSfDecoder::onDecode");

    //-------------------------------- Need to buffer some more before decoding?
    bool eos;
    if (mDataSource == 0) {
        // the application set the play state to paused, which failed, then set it to playing
        return;
    }

    if (wantPrefetch()
            && (getCacheRemaining(&eos) == kStatusLow)
            && !eos) {
        SL_LOGV("buffering more.");

        if (mStateFlags & kFlagPlaying) {
            pauseAudioSink();
        }
        mStateFlags |= kFlagBuffering;
        (new AMessage(kWhatCheckCache, id()))->post(100000);
        return;
    }

    if (!(mStateFlags & (kFlagPlaying | kFlagBuffering | kFlagPreparing))) {
        // don't decode if we're not buffering, prefetching or playing
        //SL_LOGV("don't decode: not buffering, prefetching or playing");
        return;
    }

    //-------------------------------- Decode
    status_t err;
    MediaSource::ReadOptions readOptions;
    if (mStateFlags & kFlagSeeking) {
        assert(mSeekTimeMsec != ANDROID_UNKNOWN_TIME);
        readOptions.setSeekTo(mSeekTimeMsec * 1000);
    }

    int64_t timeUsec = ANDROID_UNKNOWN_TIME;
    {
        Mutex::Autolock _l(mBufferSourceLock);

        if (NULL != mDecodeBuffer) {
            // the current decoded buffer hasn't been rendered, drop it
            mDecodeBuffer->release();
            mDecodeBuffer = NULL;
        }
        if (!mAudioSourceStarted) {
            return;
        }
        err = mAudioSource->read(&mDecodeBuffer, &readOptions);
        if (err == OK) {
            // FIXME workaround apparent bug in AAC decoder: kKeyTime is 3 frames old if length is 0
            if (mDecodeBuffer->range_length() == 0) {
                timeUsec = ANDROID_UNKNOWN_TIME;
            } else {
                CHECK(mDecodeBuffer->meta_data()->findInt64(kKeyTime, &timeUsec));
            }
        } else {
            // errors are handled below
        }
    }

    {
        Mutex::Autolock _l(mTimeLock);
        if (mStateFlags & kFlagSeeking) {
            mStateFlags &= ~kFlagSeeking;
            mSeekTimeMsec = ANDROID_UNKNOWN_TIME;
        }
        if (timeUsec != ANDROID_UNKNOWN_TIME) {
            // Note that though we've decoded this position, we haven't rendered it yet.
            // So a GetPosition called after this point will observe the advanced position,
            // even though the PCM may not have been supplied to the sink. That's OK as
            // we don't claim to provide AAC frame-accurate (let alone sample-accurate) GetPosition.
            mLastDecodedPositionUs = timeUsec;
        }
    }

    //-------------------------------- Handle return of decode
    if (err != OK) {
        bool continueDecoding = false;
        switch (err) {
            case ERROR_END_OF_STREAM:
                if (0 < mDurationUsec) {
                    Mutex::Autolock _l(mTimeLock);
                    mLastDecodedPositionUs = mDurationUsec;
                }
                // handle notification and looping at end of stream
                if (mStateFlags & kFlagPlaying) {
                    notify(PLAYEREVENT_ENDOFSTREAM, 1, true /*async*/);
                }
                if (mStateFlags & kFlagLooping) {
                    seek(0);
                    // kick-off decoding again
                    continueDecoding = true;
                }
                break;
            case INFO_FORMAT_CHANGED:
                SL_LOGD("MediaSource::read encountered INFO_FORMAT_CHANGED");
                // reconfigure output
                {
                    Mutex::Autolock _l(mBufferSourceLock);
                    hasNewDecodeParams();
                }
                continueDecoding = true;
                break;
            case INFO_DISCONTINUITY:
                SL_LOGD("MediaSource::read encountered INFO_DISCONTINUITY");
                continueDecoding = true;
                break;
            default:
                SL_LOGE("MediaSource::read returned error %d", err);
                break;
        }
        if (continueDecoding) {
            if (NULL == mDecodeBuffer) {
                (new AMessage(kWhatDecode, id()))->post();
                return;
            }
        } else {
            return;
        }
    }

    //-------------------------------- Render
    sp<AMessage> msg = new AMessage(kWhatRender, id());
    msg->post();

}


void AudioSfDecoder::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDecode:
            onDecode();
            break;

        case kWhatRender:
            onRender();
            break;

        case kWhatCheckCache:
            onCheckCache(msg);
            break;

        default:
            GenericPlayer::onMessageReceived(msg);
            break;
    }
}

//--------------------------------------------------
// Prepared state, prefetch status notifications
void AudioSfDecoder::notifyPrepared(status_t prepareRes) {
    assert(!(mStateFlags & (kFlagPrepared | kFlagPreparedUnsuccessfully)));
    if (NO_ERROR == prepareRes) {
        // The "then" fork is not currently used, but is kept here to make it easier
        // to replace with a new signalPrepareCompletion(status) if we re-visit this later.
        mStateFlags |= kFlagPrepared;
    } else {
        mStateFlags |= kFlagPreparedUnsuccessfully;
    }
    // Do not call the superclass onPrepare to notify, because it uses a default error
    // status code but we can provide a more specific one.
    // GenericPlayer::onPrepare();
    notify(PLAYEREVENT_PREPARED, (int32_t)prepareRes, true /*async*/);
    SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags);
}


void AudioSfDecoder::onNotify(const sp<AMessage> &msg) {
    notif_cbf_t notifyClient;
    void* notifyUser;
    {
        android::Mutex::Autolock autoLock(mNotifyClientLock);
        if (NULL == mNotifyClient) {
            return;
        } else {
            notifyClient = mNotifyClient;
            notifyUser = mNotifyUser;
        }
    }
    int32_t val;
    if (msg->findInt32(PLAYEREVENT_PREFETCHSTATUSCHANGE, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHSTATUSCHANGE, val);
        notifyClient(kEventPrefetchStatusChange, val, 0, notifyUser);
    }
    else if (msg->findInt32(PLAYEREVENT_PREFETCHFILLLEVELUPDATE, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_PREFETCHFILLLEVELUPDATE, val);
        notifyClient(kEventPrefetchFillLevelUpdate, val, 0, notifyUser);
    }
    else if (msg->findInt32(PLAYEREVENT_ENDOFSTREAM, &val)) {
        SL_LOGV("\tASfPlayer notifying %s = %d", PLAYEREVENT_ENDOFSTREAM, val);
        notifyClient(kEventEndOfStream, val, 0, notifyUser);
    }
    else {
        GenericPlayer::onNotify(msg);
    }
}


//--------------------------------------------------
// Private utility functions

bool AudioSfDecoder::wantPrefetch() {
    if (mDataSource != 0) {
        return (mDataSource->flags() & DataSource::kWantsPrefetching);
    } else {
        // happens if an improper data locator was passed, if the media extractor couldn't be
        // initialized, if there is no audio track in the media, if the OMX decoder couldn't be
        // instantiated, if the source couldn't be opened, or if the MediaSource
        // couldn't be started
        SL_LOGV("AudioSfDecoder::wantPrefetch() tries to access NULL mDataSource");
        return false;
    }
}


int64_t AudioSfDecoder::getPositionUsec() {
    Mutex::Autolock _l(mTimeLock);
    if (mStateFlags & kFlagSeeking) {
        return mSeekTimeMsec * 1000;
    } else {
        return mLastDecodedPositionUs;
    }
}


CacheStatus_t AudioSfDecoder::getCacheRemaining(bool *eos) {
    sp<NuCachedSource2> cachedSource =
            static_cast<NuCachedSource2 *>(mDataSource.get());

    CacheStatus_t oldStatus = mCacheStatus;

    status_t finalStatus;
    size_t dataRemaining = cachedSource->approxDataRemaining(&finalStatus);
    *eos = (finalStatus != OK);

    CHECK_GE(mBitrate, 0);

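    // convert the number of bytes still cached into an estimated playback time:
    // bytes * 8 bits/byte * 1,000,000 us/s, divided by the stream bitrate in bits/s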
    int64_t dataRemainingUs = dataRemaining * 8000000ll / mBitrate;
    //SL_LOGV("AudioSfDecoder::getCacheRemaining: approx %.2f secs remaining (eos=%d)",
    //        dataRemainingUs / 1E6, *eos);

    if (*eos) {
        // data is buffered up to the end of the stream; it can't get any better than this
        mCacheStatus = kStatusHigh;
        mCacheFill = 1000;

    } else {
        if (mDurationUsec > 0) {
            // known duration:

            // fill level is the ratio of how much has been played + how much is
            // cached, divided by the total duration
            int64_t currentPositionUsec = getPositionUsec();
            if (currentPositionUsec == ANDROID_UNKNOWN_TIME) {
                // if we don't know where we are, assume the worst for the fill ratio
                currentPositionUsec = 0;
            }
            if (mDurationUsec > 0) {
                mCacheFill = (int16_t) ((1000.0
                        * (double)(currentPositionUsec + dataRemainingUs) / mDurationUsec));
            } else {
                mCacheFill = 0;
            }
            //SL_LOGV("cacheFill = %d", mCacheFill);
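            // e.g. 30 s already played plus 15 s cached out of a 60 s stream gives
            // mCacheFill = 1000 * (30 + 15) / 60 = 750, i.e. 75.0% expressed per mille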

            // cache status is evaluated against duration thresholds
            if (dataRemainingUs > DURATION_CACHED_HIGH_MS*1000) {
                mCacheStatus = kStatusHigh;
                //ALOGV("high");
            } else if (dataRemainingUs > DURATION_CACHED_MED_MS*1000) {
                //ALOGV("enough");
                mCacheStatus = kStatusEnough;
            } else if (dataRemainingUs < DURATION_CACHED_LOW_MS*1000) {
                //ALOGV("low");
                mCacheStatus = kStatusLow;
            } else {
                mCacheStatus = kStatusIntermediate;
            }

        } else {
            // unknown duration:

            // cache status is evaluated against cache amount thresholds
            // (no duration so we don't have the bitrate either, could be derived from format?)
            if (dataRemaining > SIZE_CACHED_HIGH_BYTES) {
                mCacheStatus = kStatusHigh;
            } else if (dataRemaining > SIZE_CACHED_MED_BYTES) {
                mCacheStatus = kStatusEnough;
            } else if (dataRemaining < SIZE_CACHED_LOW_BYTES) {
                mCacheStatus = kStatusLow;
            } else {
                mCacheStatus = kStatusIntermediate;
            }
        }

    }

    if (oldStatus != mCacheStatus) {
        notifyStatus();
    }

    if (abs(mCacheFill - mLastNotifiedCacheFill) > mCacheFillNotifThreshold) {
        notifyCacheFill();
    }

    return mCacheStatus;
}


void AudioSfDecoder::hasNewDecodeParams() {

    if ((mAudioSource != 0) && mAudioSourceStarted) {
        sp<MetaData> meta = mAudioSource->getFormat();

        int32_t channelCount;
        CHECK(meta->findInt32(kKeyChannelCount, &channelCount));
        int32_t sr;
        CHECK(meta->findInt32(kKeySampleRate, &sr));

        // FIXME similar to onPrepare()
        {
            android::Mutex::Autolock autoLock(mPcmFormatLock);
            SL_LOGV("format changed: old sr=%d, channels=%d; new sr=%d, channels=%d",
                    mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE],
                    mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS],
                    sr, channelCount);
            mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = channelCount;
            mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = sr;
            mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] =
                    channelCountToMask(channelCount);
        }
        // there's no need to do a notify of PLAYEREVENT_CHANNEL_COUNT,
        // because the only listener is for volume updates, and decoders don't support that
    }

    // alert users of those params
    updateAudioSink();
}

static const char* const kPlaybackOnlyCodecs[] = { MEDIA_MIMETYPE_AUDIO_AMR_NB,
        MEDIA_MIMETYPE_AUDIO_AMR_WB };
#define NB_PLAYBACK_ONLY_CODECS (sizeof(kPlaybackOnlyCodecs)/sizeof(kPlaybackOnlyCodecs[0]))

bool AudioSfDecoder::isSupportedCodec(const char* mime) {
    bool codecRequiresPermission = false;
    for (unsigned int i = 0 ; i < NB_PLAYBACK_ONLY_CODECS ; i++) {
        if (!strcasecmp(mime, kPlaybackOnlyCodecs[i])) {
            codecRequiresPermission = true;
            break;
        }
    }
    if (codecRequiresPermission) {
        // verify only the system can decode, for playback only
        return checkCallingPermission(
                String16("android.permission.ALLOW_ANY_CODEC_FOR_PLAYBACK"));
    } else {
        return true;
    }
}

} // namespace android