1 /*
2 * Copyright (C) 2009 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include <inttypes.h>
18 #include <fcntl.h>
19 #include <stdlib.h>
20 #include <string.h>
21 #include <sys/time.h>
22 #include <sys/types.h>
23 #include <sys/stat.h>
24
25 //#define LOG_NDEBUG 0
26 #define LOG_TAG "stagefright"
27 #include <media/stagefright/foundation/ADebug.h>
28
29 #include "jpeg.h"
30 #include "SineSource.h"
31
32 #include <binder/IServiceManager.h>
33 #include <binder/ProcessState.h>
34 #include <datasource/DataSourceFactory.h>
35 #include <media/DataSource.h>
36 #include <media/stagefright/MediaSource.h>
37 #include <media/IMediaHTTPService.h>
38 #include <media/IMediaPlayerService.h>
39 #include <media/stagefright/foundation/ABuffer.h>
40 #include <media/stagefright/foundation/ALooper.h>
41 #include <media/stagefright/foundation/AMessage.h>
42 #include <media/stagefright/foundation/AUtils.h>
43 #include <media/stagefright/JPEGSource.h>
44 #include <media/stagefright/InterfaceUtils.h>
45 #include <media/stagefright/MediaCodec.h>
46 #include <media/stagefright/MediaCodecConstants.h>
47 #include <media/stagefright/MediaCodecList.h>
48 #include <media/stagefright/MediaDefs.h>
49 #include <media/stagefright/MediaErrors.h>
50 #include <media/stagefright/MediaExtractor.h>
51 #include <media/stagefright/MediaExtractorFactory.h>
52 #include <media/stagefright/MetaData.h>
53 #include <media/stagefright/SimpleDecodingSource.h>
54 #include <media/stagefright/Utils.h>
55 #include <media/mediametadataretriever.h>
56
57 #include <media/stagefright/foundation/hexdump.h>
58 #include <media/stagefright/MPEG2TSWriter.h>
59 #include <media/stagefright/MPEG4Writer.h>
60
61 #include <private/media/VideoFrame.h>
62
63 #include <gui/GLConsumer.h>
64 #include <gui/Surface.h>
65 #include <gui/SurfaceComposerClient.h>
66
67 #include <android/hardware/media/omx/1.0/IOmx.h>
68
69 #include "AudioPlayer.h"
70
71 using namespace android;
72
namespace {
// Sentinel pixel-format value for -P: negated so it cannot collide with any
// real HAL_PIXEL_FORMAT_* constant. It requests RGBA_1010102 frames from the
// retriever, which are then converted to RGBA_8888 before being written out.
constexpr static int PIXEL_FORMAT_RGBA_1010102_AS_8888 = -HAL_PIXEL_FORMAT_RGBA_1010102;
}
76
// Options parsed from the command line in main():
static long gNumRepetitions;            // -n: number of full decode passes
static long gMaxNumFrames;              // -m: 0 means decode all available.
static long gReproduceBug;              // -b: bug scenario; if not -1.
static bool gPreferSoftwareCodec;       // -s
static bool gForceToUseHardwareCodec;   // -r (mutually exclusive with -s)
static bool gPlaybackAudio;             // -o
static bool gWriteMP4;                  // -w
static bool gDisplayHistogram;          // -x
static bool gVerbose = false;           // -v
static bool showProgress = true;        // cleared by -q
static String8 gWriteMP4Filename;       // argument of -w
static String8 gComponentNameOverride;  // argument of -N

// Output surface for video decoding when -S/-T is given (NULL otherwise).
static sp<ANativeWindow> gSurface;
91
getNowUs()92 static int64_t getNowUs() {
93 struct timeval tv;
94 gettimeofday(&tv, NULL);
95
96 return (int64_t)tv.tv_usec + tv.tv_sec * 1000000ll;
97 }
98
// Comparator for Vector<int64_t>::sort(): orders values ascending.
static int CompareIncreasing(const int64_t *a, const int64_t *b) {
    if (*a < *b) {
        return -1;
    }
    if (*b < *a) {
        return 1;
    }
    return 0;
}
102
displayDecodeHistogram(Vector<int64_t> * decodeTimesUs)103 static void displayDecodeHistogram(Vector<int64_t> *decodeTimesUs) {
104 printf("decode times:\n");
105
106 decodeTimesUs->sort(CompareIncreasing);
107
108 size_t n = decodeTimesUs->size();
109 int64_t minUs = decodeTimesUs->itemAt(0);
110 int64_t maxUs = decodeTimesUs->itemAt(n - 1);
111
112 printf("min decode time %" PRId64 " us (%.2f secs)\n", minUs, minUs / 1E6);
113 printf("max decode time %" PRId64 " us (%.2f secs)\n", maxUs, maxUs / 1E6);
114
115 size_t counts[100];
116 for (size_t i = 0; i < 100; ++i) {
117 counts[i] = 0;
118 }
119
120 for (size_t i = 0; i < n; ++i) {
121 int64_t x = decodeTimesUs->itemAt(i);
122
123 size_t slot = ((x - minUs) * 100) / (maxUs - minUs);
124 if (slot == 100) { slot = 99; }
125
126 ++counts[slot];
127 }
128
129 for (size_t i = 0; i < 100; ++i) {
130 int64_t slotUs = minUs + (i * (maxUs - minUs) / 100);
131
132 double fps = 1E6 / slotUs;
133 printf("[%.2f fps]: %zu\n", fps, counts[i]);
134 }
135 }
136
displayAVCProfileLevelIfPossible(const sp<MetaData> & meta)137 static void displayAVCProfileLevelIfPossible(const sp<MetaData>& meta) {
138 uint32_t type;
139 const void *data;
140 size_t size;
141 if (meta->findData(kKeyAVCC, &type, &data, &size)) {
142 const uint8_t *ptr = (const uint8_t *)data;
143 CHECK(size >= 7);
144 CHECK(ptr[0] == 1); // configurationVersion == 1
145 uint8_t profile = ptr[1];
146 uint8_t level = ptr[3];
147 fprintf(stderr, "AVC video profile %d and level %d\n", profile, level);
148 }
149 }
150
dumpSource(const sp<MediaSource> & source,const String8 & filename)151 static void dumpSource(const sp<MediaSource> &source, const String8 &filename) {
152 FILE *out = fopen(filename.string(), "wb");
153
154 CHECK_EQ((status_t)OK, source->start());
155
156 status_t err;
157 for (;;) {
158 MediaBufferBase *mbuf;
159 err = source->read(&mbuf);
160
161 if (err == INFO_FORMAT_CHANGED) {
162 continue;
163 } else if (err != OK) {
164 break;
165 }
166
167 if (gVerbose) {
168 MetaDataBase &meta = mbuf->meta_data();
169 fprintf(stdout, "sample format: %s\n", meta.toString().c_str());
170 }
171
172 CHECK_EQ(
173 fwrite((const uint8_t *)mbuf->data() + mbuf->range_offset(),
174 1,
175 mbuf->range_length(),
176 out),
177 mbuf->range_length());
178
179 mbuf->release();
180 mbuf = NULL;
181 }
182
183 CHECK_EQ((status_t)OK, source->stop());
184
185 fclose(out);
186 out = NULL;
187 }
188
playSource(sp<MediaSource> & source)189 static void playSource(sp<MediaSource> &source) {
190 sp<MetaData> meta = source->getFormat();
191
192 const char *mime;
193 CHECK(meta->findCString(kKeyMIMEType, &mime));
194
195 sp<MediaSource> rawSource;
196 if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
197 rawSource = source;
198 } else {
199 int flags = 0;
200 if (gPreferSoftwareCodec) {
201 flags |= MediaCodecList::kPreferSoftwareCodecs;
202 }
203 if (gForceToUseHardwareCodec) {
204 CHECK(!gPreferSoftwareCodec);
205 flags |= MediaCodecList::kHardwareCodecsOnly;
206 }
207 rawSource = SimpleDecodingSource::Create(
208 source, flags, gSurface,
209 gComponentNameOverride.isEmpty() ? nullptr : gComponentNameOverride.c_str(),
210 !gComponentNameOverride.isEmpty());
211 if (rawSource == NULL) {
212 return;
213 }
214 displayAVCProfileLevelIfPossible(meta);
215 }
216
217 source.clear();
218
219 status_t err = rawSource->start();
220
221 if (err != OK) {
222 fprintf(stderr, "rawSource returned error %d (0x%08x)\n", err, err);
223 return;
224 }
225
226 if (gPlaybackAudio) {
227 sp<AudioPlayer> player = sp<AudioPlayer>::make(nullptr);
228 player->setSource(rawSource);
229 rawSource.clear();
230
231 err = player->start(true /* sourceAlreadyStarted */);
232
233 if (err == OK) {
234 status_t finalStatus;
235 while (!player->reachedEOS(&finalStatus)) {
236 usleep(100000ll);
237 }
238 } else {
239 fprintf(stderr, "unable to start playback err=%d (0x%08x)\n", err, err);
240 }
241
242 return;
243 } else if (gReproduceBug >= 3 && gReproduceBug <= 5) {
244 int64_t durationUs;
245 CHECK(meta->findInt64(kKeyDuration, &durationUs));
246
247 status_t err;
248 MediaBufferBase *buffer;
249 MediaSource::ReadOptions options;
250 int64_t seekTimeUs = -1;
251 for (;;) {
252 err = rawSource->read(&buffer, &options);
253 options.clearSeekTo();
254
255 bool shouldSeek = false;
256 if (err == INFO_FORMAT_CHANGED) {
257 CHECK(buffer == NULL);
258
259 printf("format changed.\n");
260 continue;
261 } else if (err != OK) {
262 printf("reached EOF.\n");
263
264 shouldSeek = true;
265 } else {
266 int64_t timestampUs;
267 CHECK(buffer->meta_data().findInt64(kKeyTime, ×tampUs));
268
269 bool failed = false;
270
271 if (seekTimeUs >= 0) {
272 int64_t diff = timestampUs - seekTimeUs;
273
274 if (diff < 0) {
275 diff = -diff;
276 }
277
278 if ((gReproduceBug == 4 && diff > 500000)
279 || (gReproduceBug == 5 && timestampUs < 0)) {
280 printf("wanted: %.2f secs, got: %.2f secs\n",
281 seekTimeUs / 1E6, timestampUs / 1E6);
282
283 printf("ERROR: ");
284 failed = true;
285 }
286 }
287
288 printf("buffer has timestamp %" PRId64 " us (%.2f secs)\n",
289 timestampUs, timestampUs / 1E6);
290
291 buffer->release();
292 buffer = NULL;
293
294 if (failed) {
295 break;
296 }
297
298 shouldSeek = ((double)rand() / RAND_MAX) < 0.1;
299
300 if (gReproduceBug == 3) {
301 shouldSeek = false;
302 }
303 }
304
305 seekTimeUs = -1;
306
307 if (shouldSeek) {
308 seekTimeUs = (rand() * (float)durationUs) / (float)RAND_MAX;
309 options.setSeekTo(seekTimeUs);
310
311 printf("seeking to %" PRId64 " us (%.2f secs)\n",
312 seekTimeUs, seekTimeUs / 1E6);
313 }
314 }
315
316 rawSource->stop();
317
318 return;
319 }
320
321 int n = 0;
322 int64_t startTime = getNowUs();
323
324 long numIterationsLeft = gNumRepetitions;
325 MediaSource::ReadOptions options;
326
327 int64_t sumDecodeUs = 0;
328 int64_t totalBytes = 0;
329
330 Vector<int64_t> decodeTimesUs;
331
332 while (numIterationsLeft-- > 0) {
333 long numFrames = 0;
334
335 MediaBufferBase *buffer;
336
337 for (;;) {
338 int64_t startDecodeUs = getNowUs();
339 status_t err = rawSource->read(&buffer, &options);
340 int64_t delayDecodeUs = getNowUs() - startDecodeUs;
341
342 options.clearSeekTo();
343
344 if (err != OK) {
345 CHECK(buffer == NULL);
346
347 if (err == INFO_FORMAT_CHANGED) {
348 printf("format changed.\n");
349 continue;
350 }
351
352 break;
353 }
354
355 if (buffer->range_length() > 0) {
356 if (gDisplayHistogram && n > 0) {
357 // Ignore the first time since it includes some setup
358 // cost.
359 decodeTimesUs.push(delayDecodeUs);
360 }
361
362 if (gVerbose) {
363 MetaDataBase &meta = buffer->meta_data();
364 fprintf(stdout, "%ld sample format: %s\n", numFrames, meta.toString().c_str());
365 } else if (showProgress && (n++ % 16) == 0) {
366 printf(".");
367 fflush(stdout);
368 }
369 }
370
371 sumDecodeUs += delayDecodeUs;
372 totalBytes += buffer->range_length();
373
374 buffer->release();
375 buffer = NULL;
376
377 ++numFrames;
378 if (gMaxNumFrames > 0 && numFrames == gMaxNumFrames) {
379 break;
380 }
381
382 if (gReproduceBug == 1 && numFrames == 40) {
383 printf("seeking past the end now.");
384 options.setSeekTo(0x7fffffffL);
385 } else if (gReproduceBug == 2 && numFrames == 40) {
386 printf("seeking to 5 secs.");
387 options.setSeekTo(5000000);
388 }
389 }
390
391 if (showProgress) {
392 printf("$");
393 fflush(stdout);
394 }
395
396 options.setSeekTo(0);
397 }
398
399 rawSource->stop();
400 printf("\n");
401
402 int64_t delay = getNowUs() - startTime;
403 if (!strncasecmp("video/", mime, 6)) {
404 printf("avg. %.2f fps\n", n * 1E6 / delay);
405
406 printf("avg. time to decode one buffer %.2f usecs\n",
407 (double)sumDecodeUs / n);
408
409 printf("decoded a total of %d frame(s).\n", n);
410
411 if (gDisplayHistogram) {
412 displayDecodeHistogram(&decodeTimesUs);
413 }
414 } else if (!strncasecmp("audio/", mime, 6)) {
415 // Frame count makes less sense for audio, as the output buffer
416 // sizes may be different across decoders.
417 printf("avg. %.2f KB/sec\n", totalBytes / 1024 * 1E6 / delay);
418
419 printf("decoded a total of %" PRId64 " bytes\n", totalBytes);
420 }
421 }
422
423 ////////////////////////////////////////////////////////////////////////////////
424
// MediaSource filter that tags every buffer it passes through with
// kKeyIsSyncFrame, for streams whose extractor does not provide sync-sample
// info itself (see writeSourcesToMP4). For AVC it detects IDR frames and
// drops everything before the first one; non-AVC streams have every buffer
// marked as a sync frame.
struct DetectSyncSource : public MediaSource {
    explicit DetectSyncSource(const sp<MediaSource> &source);

    virtual status_t start(MetaData *params = NULL);
    virtual status_t stop();
    virtual sp<MetaData> getFormat();

    // Reads from the wrapped source; AVC buffers before the first IDR frame
    // are consumed and released rather than returned.
    virtual status_t read(
            MediaBufferBase **buffer, const ReadOptions *options);

private:
    enum StreamType {
        AVC,
        MPEG4,
        H263,
        OTHER,
    };

    sp<MediaSource> mSource;    // wrapped upstream source
    StreamType mStreamType;     // derived from the source's MIME type
    bool mSawFirstIDRFrame;     // set once read() has seen an IDR (AVC only)

    DISALLOW_EVIL_CONSTRUCTORS(DetectSyncSource);
};
449
// Classifies the wrapped source by its MIME type. Sync-frame detection is
// only implemented for AVC; MPEG4 and H.263 abort via CHECK, and any other
// type falls back to OTHER (every frame treated as sync).
DetectSyncSource::DetectSyncSource(const sp<MediaSource> &source)
    : mSource(source),
      mStreamType(OTHER),
      mSawFirstIDRFrame(false) {
    const char *mime;
    CHECK(mSource->getFormat()->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
        mStreamType = AVC;
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)) {
        mStreamType = MPEG4;
        CHECK(!"sync frame detection not implemented yet for MPEG4");
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
        mStreamType = H263;
        CHECK(!"sync frame detection not implemented yet for H.263");
    }
}
467
// Resets IDR tracking so a restart re-discards pre-IDR buffers, then starts
// the wrapped source.
status_t DetectSyncSource::start(MetaData *params) {
    mSawFirstIDRFrame = false;

    return mSource->start(params);
}
473
// Forwards stop() to the wrapped source.
status_t DetectSyncSource::stop() {
    return mSource->stop();
}
477
// Forwards getFormat() to the wrapped source (format is unchanged by this
// filter).
sp<MetaData> DetectSyncSource::getFormat() {
    return mSource->getFormat();
}
481
isIDRFrame(MediaBufferBase * buffer)482 static bool isIDRFrame(MediaBufferBase *buffer) {
483 const uint8_t *data =
484 (const uint8_t *)buffer->data() + buffer->range_offset();
485 size_t size = buffer->range_length();
486 for (size_t i = 0; i + 3 < size; ++i) {
487 if (!memcmp("\x00\x00\x01", &data[i], 3)) {
488 uint8_t nalType = data[i + 3] & 0x1f;
489 if (nalType == 5) {
490 return true;
491 }
492 }
493 }
494
495 return false;
496 }
497
// Pulls buffers from the wrapped source, tagging each with kKeyIsSyncFrame.
// For AVC streams, buffers before the first IDR frame are released and the
// loop reads the next one instead of returning.
status_t DetectSyncSource::read(
        MediaBufferBase **buffer, const ReadOptions *options) {
    for (;;) {
        status_t err = mSource->read(buffer, options);

        if (err != OK) {
            return err;
        }

        if (mStreamType == AVC) {
            bool isIDR = isIDRFrame(*buffer);
            (*buffer)->meta_data().setInt32(kKeyIsSyncFrame, isIDR);
            if (isIDR) {
                mSawFirstIDRFrame = true;
            }
        } else {
            // Non-AVC: every buffer is treated as a sync frame.
            (*buffer)->meta_data().setInt32(kKeyIsSyncFrame, true);
        }

        if (mStreamType != AVC || mSawFirstIDRFrame) {
            break;
        }

        // Ignore everything up to the first IDR frame.
        (*buffer)->release();
        *buffer = NULL;
    }

    return OK;
}
528
529 ////////////////////////////////////////////////////////////////////////////////
530
// Muxes |sources| into the file named by gWriteMP4Filename (-w) and blocks
// until the writer reaches EOS, capped at one minute of content.
// NOTE: despite the name, the live (#else) branch writes an MPEG2 transport
// stream; the MPEG4Writer path is compiled out with #if 0.
// |syncInfoPresent| is false for containers (e.g. MPEG2-TS input) whose
// extractor doesn't flag sync frames; those sources get wrapped in
// DetectSyncSource.
static void writeSourcesToMP4(
        Vector<sp<MediaSource> > &sources, bool syncInfoPresent) {
#if 0
    sp<MPEG4Writer> writer =
        new MPEG4Writer(gWriteMP4Filename.string());
#else
    int fd = open(gWriteMP4Filename.string(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
    if (fd < 0) {
        fprintf(stderr, "couldn't open file");
        return;
    }
    sp<MPEG2TSWriter> writer =
        new MPEG2TSWriter(fd);
#endif

    // at most one minute.
    writer->setMaxFileDuration(60000000ll);

    for (size_t i = 0; i < sources.size(); ++i) {
        sp<MediaSource> source = sources.editItemAt(i);

        // Wrap sources that can't flag their own sync frames.
        CHECK_EQ(writer->addSource(
                    syncInfoPresent ? source : new DetectSyncSource(source)),
                (status_t)OK);
    }

    sp<MetaData> params = new MetaData;
    params->setInt32(kKeyRealTimeRecording, false);
    CHECK_EQ(writer->start(params.get()), (status_t)OK);

    // Poll until the writer has consumed all input (or hit the cap).
    while (!writer->reachedEOS()) {
        usleep(100000);
    }
    writer->stop();
}
566
// Seek-accuracy test (-k): seeks to targets spaced 60 ms apart across the
// whole track using SEEK_PREVIOUS_SYNC and prints one tab-separated line per
// target: "<requested us>\t<actual us>\t<delta us>".
static void performSeekTest(const sp<MediaSource> &source) {
    CHECK_EQ((status_t)OK, source->start());

    int64_t durationUs;
    CHECK(source->getFormat()->findInt64(kKeyDuration, &durationUs));

    for (int64_t seekTimeUs = 0; seekTimeUs <= durationUs;
            seekTimeUs += 60000ll) {
        MediaSource::ReadOptions options;
        options.setSeekTo(
                seekTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);

        MediaBufferBase *buffer;
        status_t err;
        for (;;) {
            err = source->read(&buffer, &options);

            // Only the first read after setSeekTo should seek.
            options.clearSeekTo();

            if (err == INFO_FORMAT_CHANGED) {
                CHECK(buffer == NULL);
                continue;
            }

            if (err != OK) {
                CHECK(buffer == NULL);
                break;
            }

            CHECK(buffer != NULL);

            // Keep reading past zero-length buffers until a real sample
            // arrives for this seek target.
            if (buffer->range_length() > 0) {
                break;
            }

            buffer->release();
            buffer = NULL;
        }

        if (err == OK) {
            int64_t timeUs;
            CHECK(buffer->meta_data().findInt64(kKeyTime, &timeUs));

            printf("%" PRId64 "\t%" PRId64 "\t%" PRId64 "\n",
                   seekTimeUs, timeUs, seekTimeUs - timeUs);

            buffer->release();
            buffer = NULL;
        } else {
            printf("ERROR\n");
            break;
        }
    }

    CHECK_EQ((status_t)OK, source->stop());
}
623
// Prints command-line help for this tool to stderr. |me| is argv[0].
static void usage(const char *me) {
    fprintf(stderr, "usage: %s [options] [input_filename]\n", me);
    fprintf(stderr, "       -h(elp)\n");
    fprintf(stderr, "       -a(udio)\n");
    fprintf(stderr, "       -n repetitions\n");
    fprintf(stderr, "       -l(ist) components\n");
    fprintf(stderr, "       -m max-number-of-frames-to-decode in each pass\n");
    fprintf(stderr, "       -b bug to reproduce\n");
    fprintf(stderr, "       -i(nfo) dump codec info (profiles and color formats supported, details)\n");
    fprintf(stderr, "       -t(humbnail) extract video thumbnail or album art (/sdcard/out.jpg)\n");
    fprintf(stderr, "       -P(ixelFormat) pixel format to use for raw thumbnail "
                    "(/sdcard/out.raw)\n");
    fprintf(stderr, "                 %d: RGBA_565\n", HAL_PIXEL_FORMAT_RGB_565);
    fprintf(stderr, "                 %d: RGBA_8888\n", HAL_PIXEL_FORMAT_RGBA_8888);
    fprintf(stderr, "                 %d: BGRA_8888\n", HAL_PIXEL_FORMAT_BGRA_8888);
    fprintf(stderr, "                 %d: RGBA_1010102\n", HAL_PIXEL_FORMAT_RGBA_1010102);
    fprintf(stderr, "                 %d: RGBA_1010102 as RGBA_8888\n", PIXEL_FORMAT_RGBA_1010102_AS_8888);
    fprintf(stderr, "       -s(oftware) prefer software codec\n");
    fprintf(stderr, "       -r(hardware) force to use hardware codec\n");
    fprintf(stderr, "       -o playback audio\n");
    fprintf(stderr, "       -w(rite) filename (write to .mp4 file)\n");
    fprintf(stderr, "       -k seek test\n");
    fprintf(stderr, "       -N(ame) of the component\n");
    fprintf(stderr, "       -x display a histogram of decoding times/fps "
                    "(video only)\n");
    fprintf(stderr, "       -q don't show progress indicator\n");
    fprintf(stderr, "       -S allocate buffers from a surface\n");
    fprintf(stderr, "       -T allocate buffers from a surface texture\n");
    fprintf(stderr, "       -d(ump) output_filename (raw stream data to a file)\n");
    fprintf(stderr, "       -D(ump) output_filename (decoded PCM data to a file)\n");
    fprintf(stderr, "       -v be more verbose\n");
}
656
// -i: prints a detailed report of every decoder (queryDecoders=true) or
// encoder (false) known to MediaCodecList, grouped by supported media type.
// Per codec: aliases, attribute flags, owner, rank, profile/levels, color
// formats and the free-form capability details.
static void dumpCodecDetails(bool queryDecoders) {
    const char *codecType = queryDecoders? "Decoder" : "Encoder";
    printf("\n%s infos by media types:\n"
           "=============================\n", codecType);

    sp<IMediaCodecList> list = MediaCodecList::getInstance();
    size_t numCodecs = list->countCodecs();

    // gather all media types supported by codec class, and link to codecs that support them
    KeyedVector<AString, Vector<sp<MediaCodecInfo>>> allMediaTypes;
    for (size_t codec_ix = 0; codec_ix < numCodecs; ++codec_ix) {
        sp<MediaCodecInfo> info = list->getCodecInfo(codec_ix);
        if (info->isEncoder() == !queryDecoders) {
            Vector<AString> supportedMediaTypes;
            info->getSupportedMediaTypes(&supportedMediaTypes);
            if (!supportedMediaTypes.size()) {
                printf("warning: %s does not support any media types\n",
                        info->getCodecName());
            } else {
                for (const AString &mediaType : supportedMediaTypes) {
                    if (allMediaTypes.indexOfKey(mediaType) < 0) {
                        allMediaTypes.add(mediaType, Vector<sp<MediaCodecInfo>>());
                    }
                    allMediaTypes.editValueFor(mediaType).add(info);
                }
            }
        }
    }

    // Per-codec data (aliases, attributes, ...) is printed in full only the
    // first time a codec appears; later media types just say "see above".
    KeyedVector<AString, bool> visitedCodecs;
    for (size_t type_ix = 0; type_ix < allMediaTypes.size(); ++type_ix) {
        const AString &mediaType = allMediaTypes.keyAt(type_ix);
        printf("\nMedia type '%s':\n", mediaType.c_str());

        for (const sp<MediaCodecInfo> &info : allMediaTypes.valueAt(type_ix)) {
            sp<MediaCodecInfo::Capabilities> caps = info->getCapabilitiesFor(mediaType.c_str());
            if (caps == NULL) {
                printf("warning: %s does not have capabilities for type %s\n",
                        info->getCodecName(), mediaType.c_str());
                continue;
            }
            printf("  %s \"%s\" supports\n",
                    codecType, info->getCodecName());

            // Helper: print a labeled, comma-separated bracket list.
            auto printList = [](const char *type, const Vector<AString> &values){
                printf("    %s: [", type);
                for (size_t j = 0; j < values.size(); ++j) {
                    printf("\n      %s%s", values[j].c_str(),
                            j == values.size() - 1 ? " " : ",");
                }
                printf("]\n");
            };

            if (visitedCodecs.indexOfKey(info->getCodecName()) < 0) {
                visitedCodecs.add(info->getCodecName(), true);
                {
                    Vector<AString> aliases;
                    info->getAliases(&aliases);
                    // quote alias
                    for (AString &alias : aliases) {
                        alias.insert("\"", 1, 0);
                        alias.append('"');
                    }
                    printList("aliases", aliases);
                }
                {
                    // Decode the attribute bit flags into readable 0/1 lines.
                    uint32_t attrs = info->getAttributes();
                    Vector<AString> list;
                    list.add(AStringPrintf("encoder: %d", !!(attrs & MediaCodecInfo::kFlagIsEncoder)));
                    list.add(AStringPrintf("vendor: %d", !!(attrs & MediaCodecInfo::kFlagIsVendor)));
                    list.add(AStringPrintf("software-only: %d", !!(attrs & MediaCodecInfo::kFlagIsSoftwareOnly)));
                    list.add(AStringPrintf("hw-accelerated: %d", !!(attrs & MediaCodecInfo::kFlagIsHardwareAccelerated)));
                    printList(AStringPrintf("attributes: %#x", attrs).c_str(), list);
                }

                printf("    owner: \"%s\"\n", info->getOwnerName());
                printf("    rank: %u\n", info->getRank());
            } else {
                printf("    aliases, attributes, owner, rank: see above\n");
            }

            {
                // Profile/level pairs, with symbolic names where the media
                // type has a known mapping ("??" otherwise).
                Vector<AString> list;
                Vector<MediaCodecInfo::ProfileLevel> profileLevels;
                caps->getSupportedProfileLevels(&profileLevels);
                for (const MediaCodecInfo::ProfileLevel &pl : profileLevels) {
                    const char *niceProfile =
                        mediaType.equalsIgnoreCase(MIMETYPE_AUDIO_AAC)   ? asString_AACObject(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG2) ? asString_MPEG2Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_H263)  ? asString_H263Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG4) ? asString_MPEG4Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AVC)   ? asString_AVCProfile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP8)   ? asString_VP8Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_HEVC)  ? asString_HEVCProfile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP9)   ? asString_VP9Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)   ? asString_AV1Profile(pl.mProfile) :"??";
                    const char *niceLevel =
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG2) ? asString_MPEG2Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_H263)  ? asString_H263Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG4) ? asString_MPEG4Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AVC)   ? asString_AVCLevel(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP8)   ? asString_VP8Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_HEVC)  ? asString_HEVCTierLevel(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP9)   ? asString_VP9Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)   ? asString_AV1Level(pl.mLevel) :
                        "??";

                    list.add(AStringPrintf("% 5u/% 5u (%s/%s)",
                            pl.mProfile, pl.mLevel, niceProfile, niceLevel));
                }
                printList("profile/levels", list);
            }

            {
                Vector<AString> list;
                Vector<uint32_t> colors;
                caps->getSupportedColorFormats(&colors);
                for (uint32_t color : colors) {
                    list.add(AStringPrintf("%#x (%s)", color,
                            asString_ColorFormat((int32_t)color)));
                }
                printList("colors", list);
            }

            printf("    details: %s\n", caps->getDetails()->debugString(6).c_str());
        }
    }
}
785
main(int argc,char ** argv)786 int main(int argc, char **argv) {
787 android::ProcessState::self()->startThreadPool();
788
789 bool audioOnly = false;
790 bool listComponents = false;
791 bool dumpCodecInfo = false;
792 bool extractThumbnail = false;
793 bool seekTest = false;
794 bool useSurfaceAlloc = false;
795 bool useSurfaceTexAlloc = false;
796 bool dumpStream = false;
797 bool dumpPCMStream = false;
798 int32_t pixelFormat = 0; // thumbnail pixel format
799 String8 dumpStreamFilename;
800 gNumRepetitions = 1;
801 gMaxNumFrames = 0;
802 gReproduceBug = -1;
803 gPreferSoftwareCodec = false;
804 gForceToUseHardwareCodec = false;
805 gPlaybackAudio = false;
806 gWriteMP4 = false;
807 gDisplayHistogram = false;
808
809 sp<android::ALooper> looper;
810
811 int res;
812 while ((res = getopt(argc, argv, "vhaqn:lm:b:itsrow:kN:xSTd:D:P:")) >= 0) {
813 switch (res) {
814 case 'a':
815 {
816 audioOnly = true;
817 break;
818 }
819
820 case 'q':
821 {
822 showProgress = false;
823 break;
824 }
825
826 case 'd':
827 {
828 dumpStream = true;
829 dumpStreamFilename.setTo(optarg);
830 break;
831 }
832
833 case 'D':
834 {
835 dumpPCMStream = true;
836 audioOnly = true;
837 dumpStreamFilename.setTo(optarg);
838 break;
839 }
840
841 case 'N':
842 {
843 gComponentNameOverride.setTo(optarg);
844 break;
845 }
846
847 case 'l':
848 {
849 listComponents = true;
850 break;
851 }
852
853 case 'P':
854 case 'm':
855 case 'n':
856 case 'b':
857 {
858 char *end;
859 long x = strtol(optarg, &end, 10);
860
861 if (*end != '\0' || end == optarg || x <= 0) {
862 x = 1;
863 }
864
865 if (res == 'n') {
866 gNumRepetitions = x;
867 } else if (res == 'm') {
868 gMaxNumFrames = x;
869 } else if (res == 'P') {
870 pixelFormat = x;
871 } else {
872 CHECK_EQ(res, 'b');
873 gReproduceBug = x;
874 }
875 break;
876 }
877
878 case 'w':
879 {
880 gWriteMP4 = true;
881 gWriteMP4Filename.setTo(optarg);
882 break;
883 }
884
885 case 'i':
886 {
887 dumpCodecInfo = true;
888 break;
889 }
890
891 case 't':
892 {
893 extractThumbnail = true;
894 break;
895 }
896
897 case 's':
898 {
899 gPreferSoftwareCodec = true;
900 break;
901 }
902
903 case 'r':
904 {
905 gForceToUseHardwareCodec = true;
906 break;
907 }
908
909 case 'o':
910 {
911 gPlaybackAudio = true;
912 break;
913 }
914
915 case 'k':
916 {
917 seekTest = true;
918 break;
919 }
920
921 case 'x':
922 {
923 gDisplayHistogram = true;
924 break;
925 }
926
927 case 'S':
928 {
929 useSurfaceAlloc = true;
930 break;
931 }
932
933 case 'T':
934 {
935 useSurfaceTexAlloc = true;
936 break;
937 }
938
939 case 'v':
940 {
941 gVerbose = true;
942 break;
943 }
944
945 case '?':
946 case 'h':
947 default:
948 {
949 usage(argv[0]);
950 exit(1);
951 break;
952 }
953 }
954 }
955
956 if (gPlaybackAudio && !audioOnly) {
957 // This doesn't make any sense if we're decoding the video track.
958 gPlaybackAudio = false;
959 }
960
961 argc -= optind;
962 argv += optind;
963
964 if (extractThumbnail) {
965 sp<IServiceManager> sm = defaultServiceManager();
966 sp<IBinder> binder = sm->getService(String16("media.player"));
967 sp<IMediaPlayerService> service =
968 interface_cast<IMediaPlayerService>(binder);
969
970 CHECK(service.get() != NULL);
971
972 sp<IMediaMetadataRetriever> retriever =
973 service->createMetadataRetriever();
974
975 CHECK(retriever != NULL);
976
977 for (int k = 0; k < argc; ++k) {
978 const char *filename = argv[k];
979
980 bool failed = true;
981
982 int fd = open(filename, O_RDONLY | O_LARGEFILE);
983 CHECK_GE(fd, 0);
984
985 off64_t fileSize = lseek64(fd, 0, SEEK_END);
986 CHECK_GE(fileSize, 0ll);
987
988 CHECK_EQ(retriever->setDataSource(fd, 0, fileSize), (status_t)OK);
989
990 close(fd);
991 fd = -1;
992
993 uint32_t retrieverPixelFormat = HAL_PIXEL_FORMAT_RGB_565;
994 if (pixelFormat == PIXEL_FORMAT_RGBA_1010102_AS_8888) {
995 retrieverPixelFormat = HAL_PIXEL_FORMAT_RGBA_1010102;
996 } else if (pixelFormat) {
997 retrieverPixelFormat = pixelFormat;
998 }
999 sp<IMemory> mem =
1000 retriever->getFrameAtTime(-1,
1001 MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
1002 retrieverPixelFormat, false /*metaOnly*/);
1003
1004 if (mem != NULL) {
1005 failed = false;
1006 printf("getFrameAtTime(%s) format=%d => OK\n", filename, retrieverPixelFormat);
1007
1008 VideoFrame *frame = (VideoFrame *)mem->unsecurePointer();
1009
1010 if (pixelFormat) {
1011 int bpp = 0;
1012 switch (pixelFormat) {
1013 case HAL_PIXEL_FORMAT_RGB_565:
1014 bpp = 2;
1015 break;
1016 case PIXEL_FORMAT_RGBA_1010102_AS_8888:
1017 // convert RGBA_1010102 to RGBA_8888
1018 {
1019 uint32_t *data = (uint32_t *)frame->getFlattenedData();
1020 uint32_t *end = data + frame->mWidth * frame->mHeight;
1021 for (; data < end; ++data) {
1022 *data =
1023 // pick out 8-bit R, G, B values and move them to the
1024 // correct position
1025 ( (*data & 0x3fc) >> 2) | // R
1026 ( (*data & 0xff000) >> 4) | // G
1027 ( (*data & 0x3fc00000) >> 6) | // B
1028 // pick out 2-bit A and expand to 8-bits
1029 (((*data & 0xc0000000) >> 6) * 0x55);
1030 }
1031 }
1032
1033 FALLTHROUGH_INTENDED;
1034
1035 case HAL_PIXEL_FORMAT_RGBA_1010102:
1036 case HAL_PIXEL_FORMAT_RGBA_8888:
1037 case HAL_PIXEL_FORMAT_BGRA_8888:
1038 bpp = 4;
1039 break;
1040 }
1041 if (bpp) {
1042 FILE *out = fopen("/sdcard/out.raw", "wb");
1043 fwrite(frame->getFlattenedData(), bpp * frame->mWidth, frame->mHeight, out);
1044 fclose(out);
1045
1046 printf("write out %d x %d x %dbpp\n", frame->mWidth, frame->mHeight, bpp);
1047 } else {
1048 printf("unknown pixel format.\n");
1049 }
1050 } else {
1051 CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
1052 frame->getFlattenedData(),
1053 frame->mWidth, frame->mHeight), 0);
1054 }
1055 }
1056
1057 if (!pixelFormat) {
1058 mem = retriever->extractAlbumArt();
1059
1060 if (mem != NULL) {
1061 failed = false;
1062 printf("extractAlbumArt(%s) => OK\n", filename);
1063 }
1064 }
1065
1066 if (failed) {
1067 printf("both getFrameAtTime and extractAlbumArt "
1068 "failed on file '%s'.\n", filename);
1069 }
1070 }
1071
1072 return 0;
1073 }
1074
1075 if (dumpCodecInfo) {
1076 dumpCodecDetails(true /* queryDecoders */);
1077 dumpCodecDetails(false /* queryDecoders */);
1078 }
1079
1080 if (listComponents) {
1081 using ::android::hardware::hidl_vec;
1082 using ::android::hardware::hidl_string;
1083 using namespace ::android::hardware::media::omx::V1_0;
1084 sp<IOmx> omx = IOmx::getService();
1085 CHECK(omx.get() != nullptr);
1086
1087 hidl_vec<IOmx::ComponentInfo> nodeList;
1088 auto transStatus = omx->listNodes([](
1089 const auto& status, const auto& nodeList) {
1090 CHECK(status == Status::OK);
1091 for (const auto& info : nodeList) {
1092 printf("%s\t Roles: ", info.mName.c_str());
1093 for (const auto& role : info.mRoles) {
1094 printf("%s\t", role.c_str());
1095 }
1096 }
1097 });
1098 CHECK(transStatus.isOk());
1099 }
1100
    sp<SurfaceComposerClient> composerClient;
    sp<SurfaceControl> control;

    // Set up a video output surface unless we are in audio-only mode.
    if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {
        if (useSurfaceAlloc) {
            // On-screen rendering: create a surface via SurfaceFlinger.
            composerClient = new SurfaceComposerClient;
            CHECK_EQ(composerClient->initCheck(), (status_t)OK);

            control = composerClient->createSurface(
                    String8("A Surface"),
                    1280,
                    800,
                    PIXEL_FORMAT_RGB_565,
                    0);

            CHECK(control != NULL);
            CHECK(control->isValid());

            // Raise the surface above everything else and make it visible.
            SurfaceComposerClient::Transaction{}
                    .setLayer(control, INT_MAX)
                    .show(control)
                    .apply();

            gSurface = control->getSurface();
            CHECK(gSurface != NULL);
        } else {
            CHECK(useSurfaceTexAlloc);

            // Off-screen rendering: feed frames into a GL external texture
            // through a BufferQueue producer/consumer pair.
            sp<IGraphicBufferProducer> producer;
            sp<IGraphicBufferConsumer> consumer;
            BufferQueue::createBufferQueue(&producer, &consumer);
            // NOTE(review): 'texture' goes out of scope at the end of this
            // block; presumably the BufferQueue keeps the consumer end alive
            // while gSurface holds the producer — confirm.
            sp<GLConsumer> texture = new GLConsumer(consumer, 0 /* tex */,
                    GLConsumer::TEXTURE_EXTERNAL, true /* useFenceSync */,
                    false /* isControlledByApp */);
            gSurface = new Surface(producer);
        }
    }
1138
    status_t err = OK;

    // Process each remaining command-line argument as a media source.
    for (int k = 0; k < argc && err == OK; ++k) {
        // Assume sync-sample (seek) info is available unless the container
        // type below says otherwise.
        bool syncInfoPresent = true;

        const char *filename = argv[k];

        sp<DataSource> dataSource =
            DataSourceFactory::getInstance()->CreateFromURI(NULL /* httpService */, filename);

        // "sine:" URIs are synthesized below and need no data source.
        if (strncasecmp(filename, "sine:", 5) && dataSource == NULL) {
            fprintf(stderr, "Unable to create data source.\n");
            return 1;
        }

        bool isJPEG = false;

        // Treat anything ending in ".jpg" (case-insensitive) as a JPEG still.
        size_t len = strlen(filename);
        if (len >= 4 && !strcasecmp(filename + len - 4, ".jpg")) {
            isJPEG = true;
        }

        Vector<sp<MediaSource> > mediaSources;
        sp<MediaSource> mediaSource;

        if (isJPEG) {
            mediaSource = new JPEGSource(dataSource);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else if (!strncasecmp("sine:", filename, 5)) {
            // "sine:<rate>" generates a mono sine tone; the sample rate
            // defaults to 44100 Hz when none is given after the colon.
            char *end;
            long sampleRate = strtol(filename + 5, &end, 10);

            if (end == filename + 5) {
                sampleRate = 44100;
            }
            mediaSource = new SineSource(sampleRate, 1);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else {
            // General case: sniff the container and use an extractor.
            sp<IMediaExtractor> extractor = MediaExtractorFactory::Create(dataSource);

            if (extractor == NULL) {
                fprintf(stderr, "could not create extractor.\n");
                return -1;
            }

            sp<MetaData> meta = extractor->getMetaData();

            if (meta != NULL) {
                const char *mime;
                if (!meta->findCString(kKeyMIMEType, &mime)) {
                    fprintf(stderr, "extractor did not provide MIME type.\n");
                    return -1;
                }

                // MPEG2-TS containers carry no sync-sample information.
                if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
                    syncInfoPresent = false;
                }
            }
1201
            size_t numTracks = extractor->countTracks();

            if (gWriteMP4) {
                // Remux mode: collect at most one audio and one video track.
                bool haveAudio = false;
                bool haveVideo = false;
                for (size_t i = 0; i < numTracks; ++i) {
                    sp<MediaSource> source = CreateMediaSourceFromIMediaSource(
                            extractor->getTrack(i));
                    if (source == nullptr) {
                        fprintf(stderr, "skip NULL track %zu, track count %zu.\n", i, numTracks);
                        continue;
                    }

                    const char *mime;
                    CHECK(source->getFormat()->findCString(
                                kKeyMIMEType, &mime));

                    // Use the first track of each kind we haven't seen yet.
                    bool useTrack = false;
                    if (!haveAudio && !strncasecmp("audio/", mime, 6)) {
                        haveAudio = true;
                        useTrack = true;
                    } else if (!haveVideo && !strncasecmp("video/", mime, 6)) {
                        haveVideo = true;
                        useTrack = true;
                    }

                    if (useTrack) {
                        mediaSources.push(source);

                        // Stop scanning once both kinds have been found.
                        if (haveAudio && haveVideo) {
                            break;
                        }
                    }
                }
1236 } else {
1237 sp<MetaData> meta;
1238 size_t i;
1239 for (i = 0; i < numTracks; ++i) {
1240 meta = extractor->getTrackMetaData(
1241 i, MediaExtractor::kIncludeExtensiveMetaData);
1242
1243 if (meta == NULL) {
1244 continue;
1245 }
1246 const char *mime;
1247 meta->findCString(kKeyMIMEType, &mime);
1248
1249 if (audioOnly && !strncasecmp(mime, "audio/", 6)) {
1250 break;
1251 }
1252
1253 if (!audioOnly && !strncasecmp(mime, "video/", 6)) {
1254 break;
1255 }
1256
1257 meta = NULL;
1258 }
1259
1260 if (meta == NULL) {
1261 fprintf(stderr,
1262 "No suitable %s track found. The '-a' option will "
1263 "target audio tracks only, the default is to target "
1264 "video tracks only.\n",
1265 audioOnly ? "audio" : "video");
1266 return -1;
1267 }
1268
1269 int64_t thumbTimeUs;
1270 if (meta->findInt64(kKeyThumbnailTime, &thumbTimeUs)) {
1271 printf("thumbnailTime: %" PRId64 " us (%.2f secs)\n",
1272 thumbTimeUs, thumbTimeUs / 1E6);
1273 }
1274
1275 mediaSource = CreateMediaSourceFromIMediaSource(extractor->getTrack(i));
1276 if (mediaSource == nullptr) {
1277 fprintf(stderr, "skip NULL track %zu, total tracks %zu.\n", i, numTracks);
1278 return -1;
1279 }
1280 }
1281 }
1282
1283 if (gWriteMP4) {
1284 writeSourcesToMP4(mediaSources, syncInfoPresent);
1285 } else if (dumpStream) {
1286 dumpSource(mediaSource, dumpStreamFilename);
1287 } else if (dumpPCMStream) {
1288 sp<MediaSource> decSource = SimpleDecodingSource::Create(mediaSource);
1289 dumpSource(decSource, dumpStreamFilename);
1290 } else if (seekTest) {
1291 performSeekTest(mediaSource);
1292 } else {
1293 playSource(mediaSource);
1294 }
1295 }
1296
    // Release the rendering resources acquired during surface setup.
    if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {
        gSurface.clear();

        if (useSurfaceAlloc) {
            composerClient->dispose();
        }
    }

    return 0;
}
1307