/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media;

import android.annotation.IntDef;
import android.annotation.NonNull;
import android.compat.annotation.UnsupportedAppUsage;
import android.media.MediaCodec.BufferInfo;
import android.os.Build;

import dalvik.system.CloseGuard;

import java.io.FileDescriptor;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.util.Map;

/**
 * MediaMuxer facilitates muxing elementary streams. Currently MediaMuxer supports MP4, WebM
 * and 3GP files as the output. It also supports muxing B-frames in MP4 since Android Nougat.
 * <p>
 * It is generally used like this:
 *
 * <pre>
 * MediaMuxer muxer = new MediaMuxer("temp.mp4", OutputFormat.MUXER_OUTPUT_MPEG_4);
 * // More often, the MediaFormat will be retrieved from MediaCodec.getOutputFormat()
 * // or MediaExtractor.getTrackFormat().
 * MediaFormat audioFormat = new MediaFormat(...);
 * MediaFormat videoFormat = new MediaFormat(...);
 * int audioTrackIndex = muxer.addTrack(audioFormat);
 * int videoTrackIndex = muxer.addTrack(videoFormat);
 * ByteBuffer inputBuffer = ByteBuffer.allocate(bufferSize);
 * boolean finished = false;
 * BufferInfo bufferInfo = new BufferInfo();
 *
 * muxer.start();
 * while(!finished) {
 *     // getInputBuffer() will fill the inputBuffer with one frame of encoded
 *     // sample from either MediaCodec or MediaExtractor, set isAudioSample to
 *     // true when the sample is audio data, set up all the fields of bufferInfo,
 *     // and return true if there are no more samples.
 *     finished = getInputBuffer(inputBuffer, isAudioSample, bufferInfo);
 *     if (!finished) {
 *         int currentTrackIndex = isAudioSample ? audioTrackIndex : videoTrackIndex;
 *         muxer.writeSampleData(currentTrackIndex, inputBuffer, bufferInfo);
 *     }
 * }
 * muxer.stop();
 * muxer.release();
 * </pre>
 *

 <h4>Metadata Track</h4>
 <p>
 Per-frame metadata is useful for carrying extra information that correlates with video or audio
 to facilitate offline processing, e.g. gyro signals from the sensor could help video stabilization
 when doing offline processing. Metadata tracks are only supported in the MP4 container. When adding
 a new metadata track, the track's mime format must start with the prefix "application/", e.g.
 "application/gyro". The metadata's format/layout is defined by the application. Writing metadata is
 nearly the same as writing video/audio data, except that the data does not come from MediaCodec.
 The application just needs to pass the ByteBuffer that contains the metadata and the associated
 timestamp to the {@link #writeSampleData} API. The timestamp must be in the same time base as video
 and audio.
 The generated MP4 file uses TextMetaDataSampleEntry, defined in section 12.3.3.2 of the ISOBMFF,
 to signal the metadata's mime format. When using {@link android.media.MediaExtractor} to extract
 a file with a metadata track, the mime format of the metadata will be extracted into
 {@link android.media.MediaFormat}.

 <pre class=prettyprint>
 MediaMuxer muxer = new MediaMuxer("temp.mp4", OutputFormat.MUXER_OUTPUT_MPEG_4);
 // Set up the video/audio tracks.
 MediaFormat audioFormat = new MediaFormat(...);
 MediaFormat videoFormat = new MediaFormat(...);
 int audioTrackIndex = muxer.addTrack(audioFormat);
 int videoTrackIndex = muxer.addTrack(videoFormat);

 // Set up the metadata track.
 MediaFormat metadataFormat = new MediaFormat(...);
 metadataFormat.setString(KEY_MIME, "application/gyro");
 int metadataTrackIndex = muxer.addTrack(metadataFormat);

 muxer.start();
 while(..) {
     // Allocate a ByteBuffer and write the gyro data (x, y, z) into it.
     ByteBuffer metaData = ByteBuffer.allocate(bufferSize);
     metaData.putFloat(x);
     metaData.putFloat(y);
     metaData.putFloat(z);
     BufferInfo metaInfo = new BufferInfo();
     // Associate this metadata with the video frame by setting
     // the same timestamp as the video frame.
     metaInfo.presentationTimeUs = currentVideoTrackTimeUs;
     metaInfo.offset = 0;
     metaInfo.flags = 0;
     metaInfo.size = bufferSize;
     muxer.writeSampleData(metadataTrackIndex, metaData, metaInfo);
 }
 muxer.stop();
 muxer.release();
 </pre>

 <h2 id=History><a name="History"></a>Features and API History</h2>
 <p>
 The following table summarizes feature support in different API versions and containers.
 For API version numbers, see {@link android.os.Build.VERSION_CODES}.

 <style>
  .api > tr > th, .api > tr > td { text-align: center; padding: 4px 4px; }
  .api > tr > th { vertical-align: bottom; }
  .api > tr > td { vertical-align: middle; }
  .sml > tr > th, .sml > tr > td { text-align: center; padding: 2px 4px; }
  .fn { text-align: center; }
 </style>

 <table align="right" style="width: 0%">
  <thead class=api>
   <tr><th>Symbol</th>
       <th>Meaning</th></tr>
  </thead>
  <tbody class=sml>
   <tr><td>●</td><td>Supported</td></tr>
   <tr><td>○</td><td>Not supported</td></tr>
   <tr><td>▧</td><td>Supported in MP4/WebM/3GP</td></tr>
   <tr><td>⁕</td><td>Only supported in MP4</td></tr>
  </tbody>
 </table>
 <table align="center" style="width: 100%;">
  <thead class=api>
   <tr>
    <th rowspan=2>Feature</th>
    <th colspan="9">SDK Version</th>
   </tr>
   <tr>
    <th>18</th>
    <th>19</th>
    <th>20</th>
    <th>21</th>
    <th>22</th>
    <th>23</th>
    <th>24</th>
    <th>25</th>
    <th>26+</th>
   </tr>
  </thead>
  <tbody class=api>
   <tr>
    <td align="center">MP4 container</td>
    <td>●</td> <td>●</td> <td>●</td> <td>●</td> <td>●</td> <td>●</td> <td>●</td> <td>●</td> <td>●</td>
   </tr>
   <tr>
    <td align="center">WebM container</td>
    <td>○</td> <td>○</td> <td>○</td> <td>●</td> <td>●</td> <td>●</td> <td>●</td> <td>●</td> <td>●</td>
   </tr>
   <tr>
    <td align="center">3GP container</td>
    <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>●</td>
   </tr>
   <tr>
    <td align="center">Muxing B-Frames (bi-directional predicted frames)</td>
    <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>⁕</td> <td>⁕</td> <td>⁕</td>
   </tr>
   <tr>
    <td align="center">Muxing Single Video/Audio Track</td>
    <td>▧</td> <td>▧</td> <td>▧</td> <td>▧</td> <td>▧</td> <td>▧</td> <td>▧</td> <td>▧</td> <td>▧</td>
   </tr>
   <tr>
    <td align="center">Muxing Multiple Video/Audio Tracks</td>
    <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>⁕</td>
   </tr>
   <tr>
    <td align="center">Muxing Metadata Tracks</td>
    <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>○</td> <td>⁕</td>
   </tr>
  </tbody>
 </table>
*/

final public class MediaMuxer {

    static {
        System.loadLibrary("media_jni");
    }

    /**
     * Defines the output format. These constants are used with the constructor.
     */
    public static final class OutputFormat {
        /* Do not change these values without updating their counterparts
         * in include/media/stagefright/MediaMuxer.h!
         */
        private OutputFormat() {}
        /** @hide */
        public static final int MUXER_OUTPUT_FIRST = 0;
        /** MPEG4 media file format */
        public static final int MUXER_OUTPUT_MPEG_4 = MUXER_OUTPUT_FIRST;
        /** WEBM media file format */
        public static final int MUXER_OUTPUT_WEBM = MUXER_OUTPUT_FIRST + 1;
        /** 3GPP media file format */
        public static final int MUXER_OUTPUT_3GPP = MUXER_OUTPUT_FIRST + 2;
        /** HEIF media file format */
        public static final int MUXER_OUTPUT_HEIF = MUXER_OUTPUT_FIRST + 3;
        /** Ogg media file format */
        public static final int MUXER_OUTPUT_OGG = MUXER_OUTPUT_FIRST + 4;
        /** @hide */
        public static final int MUXER_OUTPUT_LAST = MUXER_OUTPUT_OGG;
    };

    /** @hide */
    @IntDef({
        OutputFormat.MUXER_OUTPUT_MPEG_4,
        OutputFormat.MUXER_OUTPUT_WEBM,
        OutputFormat.MUXER_OUTPUT_3GPP,
        OutputFormat.MUXER_OUTPUT_HEIF,
        OutputFormat.MUXER_OUTPUT_OGG,
    })
    @Retention(RetentionPolicy.SOURCE)
    public @interface Format {}

    // All the native functions are listed here.
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private static native long nativeSetup(@NonNull FileDescriptor fd, int format)
            throws IllegalArgumentException, IOException;
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private static native void nativeRelease(long nativeObject);
    private static native void nativeStart(long nativeObject);
    private static native void nativeStop(long nativeObject);
    private static native int nativeAddTrack(
            long nativeObject, @NonNull String[] keys, @NonNull Object[] values);
    private static native void nativeSetOrientationHint(
            long nativeObject, int degrees);
    private static native void nativeSetLocation(long nativeObject, int latitude, int longitude);
    private static native void nativeWriteSampleData(
            long nativeObject, int trackIndex, @NonNull ByteBuffer byteBuf,
            int offset, int size, long presentationTimeUs, @MediaCodec.BufferFlag int flags);

    // Muxer internal states.
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private static final int MUXER_STATE_UNINITIALIZED = -1;
    private static final int MUXER_STATE_INITIALIZED = 0;
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private static final int MUXER_STATE_STARTED = 1;
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private static final int MUXER_STATE_STOPPED = 2;

    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private int mState = MUXER_STATE_UNINITIALIZED;

    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private final CloseGuard mCloseGuard = CloseGuard.get();
    private int mLastTrackIndex = -1;

    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private long mNativeObject;

    private String convertMuxerStateCodeToString(int aState) {
        switch (aState) {
            case MUXER_STATE_UNINITIALIZED:
                return "UNINITIALIZED";
            case MUXER_STATE_INITIALIZED:
                return "INITIALIZED";
            case MUXER_STATE_STARTED:
                return "STARTED";
            case MUXER_STATE_STOPPED:
                return "STOPPED";
            default:
                return "UNKNOWN";
        }
    }

    /**
     * Constructor.
     * Creates a media muxer that writes to the specified path.
     * @param path The path of the output media file.
     * @param format The format of the output media file.
     * @see android.media.MediaMuxer.OutputFormat
     * @throws IllegalArgumentException if the path is invalid or the format is not supported.
     * @throws IOException if the file cannot be opened for writing.
     */
    public MediaMuxer(@NonNull String path, @Format int format) throws IOException {
        if (path == null) {
            throw new IllegalArgumentException("path must not be null");
        }
        // Use RandomAccessFile so we can open the file with RW access;
        // RW access allows the native writer to memory map the output file.
        RandomAccessFile file = null;
        try {
            file = new RandomAccessFile(path, "rws");
            file.setLength(0);
            FileDescriptor fd = file.getFD();
            setUpMediaMuxer(fd, format);
        } finally {
            if (file != null) {
                file.close();
            }
        }
    }

    /**
     * Constructor.
     * Creates a media muxer that writes to the specified FileDescriptor. The file descriptor
     * must be seekable and writable. The application should not use the file referenced
     * by this file descriptor until {@link #stop}. It is the application's responsibility
     * to close the file descriptor, and it is safe to do so as soon as this call returns.
     * @param fd The FileDescriptor of the output media file.
     * @param format The format of the output media file.
     * @see android.media.MediaMuxer.OutputFormat
     * @throws IllegalArgumentException if fd is invalid or the format is not supported.
     * @throws IOException if the file cannot be opened for writing.
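     *
     * <p>A minimal sketch of this pattern (the output path {@code "/sdcard/output.mp4"} is an
     * illustrative assumption; any seekable, writable descriptor works):
     * <pre>
     * // Hypothetical path, shown only to illustrate obtaining a suitable descriptor.
     * RandomAccessFile file = new RandomAccessFile("/sdcard/output.mp4", "rws");
     * file.setLength(0); // start from an empty file
     * MediaMuxer muxer = new MediaMuxer(file.getFD(), OutputFormat.MUXER_OUTPUT_MPEG_4);
     * file.close(); // safe to close the descriptor as soon as the constructor returns
     * // ... addTrack(), start(), writeSampleData(), stop(), release() as usual ...
     * </pre>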
     */
    public MediaMuxer(@NonNull FileDescriptor fd, @Format int format) throws IOException {
        setUpMediaMuxer(fd, format);
    }

    private void setUpMediaMuxer(@NonNull FileDescriptor fd, @Format int format)
            throws IOException {
        if (format < OutputFormat.MUXER_OUTPUT_FIRST || format > OutputFormat.MUXER_OUTPUT_LAST) {
            throw new IllegalArgumentException("format: " + format + " is invalid");
        }
        mNativeObject = nativeSetup(fd, format);
        mState = MUXER_STATE_INITIALIZED;
        mCloseGuard.open("release");
    }

    /**
     * Sets the orientation hint for output video playback.
     * <p>This method should be called before {@link #start}. Calling this
     * method does not rotate the video frames while the muxer is generating the file,
     * but adds a composition matrix containing the rotation angle to the output
     * video if the output format is
     * {@link OutputFormat#MUXER_OUTPUT_MPEG_4}, so that a video player can
     * choose the proper orientation for playback. Note that some video players
     * may choose to ignore the composition matrix during playback.
     * By default, the rotation degree is 0.</p>
     * @param degrees the clockwise rotation angle in degrees.
     *        The supported angles are 0, 90, 180, and 270 degrees.
     * @throws IllegalArgumentException if the degree is not supported.
     * @throws IllegalStateException If this method is called after {@link #start}.
     */
    public void setOrientationHint(int degrees) {
        if (degrees != 0 && degrees != 90 && degrees != 180 && degrees != 270) {
            throw new IllegalArgumentException("Unsupported angle: " + degrees);
        }
        if (mState == MUXER_STATE_INITIALIZED) {
            nativeSetOrientationHint(mNativeObject, degrees);
        } else {
            throw new IllegalStateException("Can't set rotation degrees due"
                    + " to wrong state(" + convertMuxerStateCodeToString(mState) + ")");
        }
    }

    /**
     * Sets and stores the geodata (latitude and longitude) in the output file.
     * This method should be called before {@link #start}. The geodata is stored
     * in the udta box if the output format is
     * {@link OutputFormat#MUXER_OUTPUT_MPEG_4}, and is ignored for other output
     * formats. The geodata is stored according to the ISO-6709 standard.
     *
     * @param latitude Latitude in degrees. Its value must be in the range [-90, 90].
     * @param longitude Longitude in degrees. Its value must be in the range [-180, 180].
     * @throws IllegalArgumentException If the given latitude or longitude is out of range.
     * @throws IllegalStateException If this method is called after {@link #start}.
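     *
     * <p>A minimal sketch, assuming MP4 output and illustrative coordinate/rotation values:
     * <pre>
     * MediaMuxer muxer = new MediaMuxer("temp.mp4", OutputFormat.MUXER_OUTPUT_MPEG_4);
     * // Both hints only add metadata to the MP4 output and must be set before start().
     * muxer.setOrientationHint(90);          // one of 0, 90, 180 or 270
     * muxer.setLocation(37.422f, -122.084f); // latitude in [-90, 90], longitude in [-180, 180]
     * // ... addTrack(), start(), writeSampleData(), stop(), release() as usual ...
     * </pre>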
     */
    public void setLocation(float latitude, float longitude) {
        int latitudex10000 = (int) (latitude * 10000 + 0.5);
        int longitudex10000 = (int) (longitude * 10000 + 0.5);

        if (latitudex10000 > 900000 || latitudex10000 < -900000) {
            String msg = "Latitude: " + latitude + " out of range.";
            throw new IllegalArgumentException(msg);
        }
        if (longitudex10000 > 1800000 || longitudex10000 < -1800000) {
            String msg = "Longitude: " + longitude + " out of range";
            throw new IllegalArgumentException(msg);
        }

        if (mState == MUXER_STATE_INITIALIZED && mNativeObject != 0) {
            nativeSetLocation(mNativeObject, latitudex10000, longitudex10000);
        } else {
            throw new IllegalStateException("Can't set location due to wrong state("
                    + convertMuxerStateCodeToString(mState) + ")");
        }
    }

    /**
     * Starts the muxer.
     * <p>Make sure this is called after {@link #addTrack} and before
     * {@link #writeSampleData}.</p>
     * @throws IllegalStateException If this method is called after {@link #start}
     * or the muxer has been released.
     */
    public void start() {
        if (mNativeObject == 0) {
            throw new IllegalStateException("Muxer has been released!");
        }
        if (mState == MUXER_STATE_INITIALIZED) {
            nativeStart(mNativeObject);
            mState = MUXER_STATE_STARTED;
        } else {
            throw new IllegalStateException("Can't start due to wrong state("
                    + convertMuxerStateCodeToString(mState) + ")");
        }
    }

    /**
     * Stops the muxer.
     * <p>Once the muxer stops, it cannot be restarted.</p>
     * @throws IllegalStateException if the muxer is in the wrong state.
     */
    public void stop() {
        if (mState == MUXER_STATE_STARTED) {
            try {
                nativeStop(mNativeObject);
            } finally {
                mState = MUXER_STATE_STOPPED;
            }
        } else {
            throw new IllegalStateException("Can't stop due to wrong state("
                    + convertMuxerStateCodeToString(mState) + ")");
        }
    }

    @Override
    protected void finalize() throws Throwable {
        try {
            if (mCloseGuard != null) {
                mCloseGuard.warnIfOpen();
            }
            if (mNativeObject != 0) {
                nativeRelease(mNativeObject);
                mNativeObject = 0;
            }
        } finally {
            super.finalize();
        }
    }

    /**
     * Adds a track with the specified format.
     * <p>
     * The following table summarizes support for specific format keys across Android releases.
     * Keys marked with '+:' are required.
     *
     * <table style="width: 0%">
     *  <thead>
     *   <tr>
     *    <th rowspan=2>OS Version(s)</th>
     *    <th colspan=3>{@code MediaFormat} keys used for</th>
     *   </tr><tr>
     *    <th>All Tracks</th>
     *    <th>Audio Tracks</th>
     *    <th>Video Tracks</th>
     *   </tr>
     *  </thead>
     *  <tbody>
     *   <tr>
     *    <td>{@link android.os.Build.VERSION_CODES#JELLY_BEAN_MR2}</td>
     *    <td rowspan=7>+: {@link MediaFormat#KEY_MIME}</td>
     *    <td rowspan=3>+: {@link MediaFormat#KEY_SAMPLE_RATE},<br>
     *        +: {@link MediaFormat#KEY_CHANNEL_COUNT},<br>
     *        +: <strong>codec-specific data<sup>AAC</sup></strong></td>
     *    <td rowspan=5>+: {@link MediaFormat#KEY_WIDTH},<br>
     *        +: {@link MediaFormat#KEY_HEIGHT},<br>
     *        no {@code KEY_ROTATION},
     *        use {@link #setOrientationHint setOrientationHint()}<sup>.mp4</sup>,<br>
     *        +: <strong>codec-specific data<sup>AVC, MPEG4</sup></strong></td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#KITKAT}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#KITKAT_WATCH}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP}</td>
     *    <td rowspan=4>as above, plus<br>
     *        +: <strong>codec-specific data<sup>Vorbis &amp; .webm</sup></strong></td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP_MR1}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#M}</td>
     *    <td>as above, plus<br>
     *        {@link MediaFormat#KEY_BIT_RATE}<sup>AAC</sup></td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#N}</td>
     *    <td>as above, plus<br>
     *        <!-- {link MediaFormat#KEY_MAX_BIT_RATE}<sup>AAC, MPEG4</sup>,<br> -->
     *        {@link MediaFormat#KEY_BIT_RATE}<sup>MPEG4</sup>,<br>
     *        {@link MediaFormat#KEY_HDR_STATIC_INFO}<sup>#, .webm</sup>,<br>
     *        {@link MediaFormat#KEY_COLOR_STANDARD}<sup>#</sup>,<br>
     *        {@link MediaFormat#KEY_COLOR_TRANSFER}<sup>#</sup>,<br>
     *        {@link MediaFormat#KEY_COLOR_RANGE}<sup>#</sup>,<br>
     *        +: <strong>codec-specific data<sup>HEVC</sup></strong>,<br>
     *        codec-specific data<sup>VP9</sup></td>
     *   </tr>
     *   <tr>
     *    <td colspan=4>
     *     <p class=note><strong>Notes:</strong><br>
     *      #: storing into container metadata.<br>
     *      .mp4, .webm…: for listed containers<br>
     *      MPEG4, AAC…: for listed codecs
     *    </td>
     *   </tr><tr>
     *    <td colspan=4>
     *     <p class=note>Note that the codec-specific data for the track must be specified using
     *     this method. Furthermore, codec-specific data must not be passed/specified via the
     *     {@link #writeSampleData writeSampleData()} call.
     *    </td>
     *   </tr>
     *  </tbody>
     * </table>
     *
     * <p>
     * The following table summarizes codec support for containers across Android releases:
     *
     * <table style="width: 0%">
     *  <thead>
     *   <tr>
     *    <th rowspan=2>OS Version(s)</th>
     *    <th colspan=2>Codec support</th>
     *   </tr><tr>
     *    <th>{@linkplain OutputFormat#MUXER_OUTPUT_MPEG_4 MP4}</th>
     *    <th>{@linkplain OutputFormat#MUXER_OUTPUT_WEBM WEBM}</th>
     *   </tr>
     *  </thead>
     *  <tbody>
     *   <tr>
     *    <td>{@link android.os.Build.VERSION_CODES#JELLY_BEAN_MR2}</td>
     *    <td rowspan=6>{@link MediaFormat#MIMETYPE_AUDIO_AAC AAC},<br>
     *        {@link MediaFormat#MIMETYPE_AUDIO_AMR_NB NB-AMR},<br>
     *        {@link MediaFormat#MIMETYPE_AUDIO_AMR_WB WB-AMR},<br>
     *        {@link MediaFormat#MIMETYPE_VIDEO_H263 H.263},<br>
     *        {@link MediaFormat#MIMETYPE_VIDEO_MPEG4 MPEG-4},<br>
     *        {@link MediaFormat#MIMETYPE_VIDEO_AVC AVC} (H.264)</td>
     *    <td rowspan=3>Not supported</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#KITKAT}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#KITKAT_WATCH}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP}</td>
     *    <td rowspan=3>{@link MediaFormat#MIMETYPE_AUDIO_VORBIS Vorbis},<br>
     *        {@link MediaFormat#MIMETYPE_VIDEO_VP8 VP8}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP_MR1}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#M}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#N}</td>
     *    <td>as above, plus<br>
     *        {@link MediaFormat#MIMETYPE_VIDEO_HEVC HEVC} (H.265)</td>
     *    <td>as above, plus<br>
     *        {@link MediaFormat#MIMETYPE_VIDEO_VP9 VP9}</td>
     *   </tr>
     *  </tbody>
     * </table>
     *
     * @param format The media format for the track. This must not be an empty
     *               MediaFormat.
     * @return The track index of the newly added track; it should be used in
     *         {@link #writeSampleData}.
     * @throws IllegalArgumentException if the format is invalid.
     * @throws IllegalStateException if the muxer is in the wrong state.
     */
    public int addTrack(@NonNull MediaFormat format) {
        if (format == null) {
            throw new IllegalArgumentException("format must not be null.");
        }
        if (mState != MUXER_STATE_INITIALIZED) {
            throw new IllegalStateException("Muxer is not initialized.");
        }
        if (mNativeObject == 0) {
            throw new IllegalStateException("Muxer has been released!");
        }
        int trackIndex = -1;
        // Convert the MediaFormat into key-value pairs and send them to the native layer.
        Map<String, Object> formatMap = format.getMap();

        String[] keys = null;
        Object[] values = null;
        int mapSize = formatMap.size();
        if (mapSize > 0) {
            keys = new String[mapSize];
            values = new Object[mapSize];
            int i = 0;
            for (Map.Entry<String, Object> entry : formatMap.entrySet()) {
                keys[i] = entry.getKey();
                values[i] = entry.getValue();
                ++i;
            }
            trackIndex = nativeAddTrack(mNativeObject, keys, values);
        } else {
            throw new IllegalArgumentException("format must not be empty.");
        }

        // The track index is expected to increment as addTrack() succeeds.
        // However, if the format is invalid, trackIndex will be negative.
        if (mLastTrackIndex >= trackIndex) {
            throw new IllegalArgumentException("Invalid format.");
        }
        mLastTrackIndex = trackIndex;
        return trackIndex;
    }

    /**
     * Writes an encoded sample into the muxer.
     * <p>The application needs to make sure that the samples are written into
     * the right tracks. Also, it needs to make sure the samples for each track
     * are written in chronological order (e.g. in the order they are provided
     * by the encoder).</p>
     * <p>For the MPEG4 media format, the duration of the last sample in a track can be set by
     * passing an additional empty buffer (bufferInfo.size = 0) with the
     * MediaCodec.BUFFER_FLAG_END_OF_STREAM flag and a suitable presentation timestamp set in the
     * bufferInfo parameter as the last sample of that track. This last sample's presentation
     * timestamp shall be the sum of the presentation timestamp of the original last sample and
     * the duration preferred for it. If no explicit END_OF_STREAM sample is passed, the duration
     * of the last sample will be the same as that of the sample before it.</p>
     * <p>MediaMuxer uses the flags provided in {@link MediaCodec.BufferInfo}
     * to signal sync frames.</p>
     * @param trackIndex The track index for this sample.
     * @param byteBuf The encoded sample.
     * @param bufferInfo The buffer information related to this sample.
     * @throws IllegalArgumentException if trackIndex, byteBuf or bufferInfo is invalid.
     * @throws IllegalStateException if the muxer is in the wrong state.
     */
    public void writeSampleData(int trackIndex, @NonNull ByteBuffer byteBuf,
            @NonNull BufferInfo bufferInfo) {
        if (trackIndex < 0 || trackIndex > mLastTrackIndex) {
            throw new IllegalArgumentException("trackIndex is invalid");
        }

        if (byteBuf == null) {
            throw new IllegalArgumentException("byteBuffer must not be null");
        }

        if (bufferInfo == null) {
            throw new IllegalArgumentException("bufferInfo must not be null");
        }
        if (bufferInfo.size < 0 || bufferInfo.offset < 0
                || (bufferInfo.offset + bufferInfo.size) > byteBuf.capacity()) {
            throw new IllegalArgumentException("bufferInfo must specify a"
                    + " valid buffer offset and size");
        }

        if (mNativeObject == 0) {
            throw new IllegalStateException("Muxer has been released!");
        }

        if (mState != MUXER_STATE_STARTED) {
            throw new IllegalStateException("Can't write, muxer is not started");
        }

        nativeWriteSampleData(mNativeObject, trackIndex, byteBuf,
                bufferInfo.offset, bufferInfo.size,
                bufferInfo.presentationTimeUs, bufferInfo.flags);
    }

    /**
     * Make sure you call this when you're done to free up any resources
     * instead of relying on the garbage collector to do this for you at
     * some point in the future.
     */
    public void release() {
        if (mState == MUXER_STATE_STARTED) {
            stop();
        }
        if (mNativeObject != 0) {
            nativeRelease(mNativeObject);
            mNativeObject = 0;
            mCloseGuard.close();
        }
        mState = MUXER_STATE_UNINITIALIZED;
    }
}