1 /* 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. 3 * 4 * Use of this source code is governed by a BSD-style license 5 * that can be found in the LICENSE file in the root of the source 6 * tree. An additional intellectual property rights grant can be found 7 * in the file PATENTS. All contributing project authors may 8 * be found in the AUTHORS file in the root of the source tree. 9 */ 10 11 package org.webrtc; 12 13 import android.media.MediaCodec; 14 import android.media.MediaCodecInfo; 15 import android.media.MediaFormat; 16 import android.opengl.GLES20; 17 import android.os.Build; 18 import android.os.Bundle; 19 import android.view.Surface; 20 import androidx.annotation.Nullable; 21 import java.io.IOException; 22 import java.nio.ByteBuffer; 23 import java.util.Map; 24 import java.util.concurrent.BlockingDeque; 25 import java.util.concurrent.LinkedBlockingDeque; 26 import java.util.concurrent.TimeUnit; 27 import org.webrtc.ThreadUtils.ThreadChecker; 28 29 /** 30 * Android hardware video encoder. 31 */ 32 class HardwareVideoEncoder implements VideoEncoder { 33 private static final String TAG = "HardwareVideoEncoder"; 34 35 // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined 36 // in OMX_Video.h 37 private static final int VIDEO_ControlRateConstant = 2; 38 // Key associated with the bitrate control mode value (above). Not present as a MediaFormat 39 // constant until API level 21. 40 private static final String KEY_BITRATE_MODE = "bitrate-mode"; 41 42 private static final int VIDEO_AVC_PROFILE_HIGH = 8; 43 private static final int VIDEO_AVC_LEVEL_3 = 0x100; 44 45 private static final int MAX_VIDEO_FRAMERATE = 30; 46 47 // See MAX_ENCODER_Q_SIZE in androidmediaencoder.cc. 48 private static final int MAX_ENCODER_Q_SIZE = 2; 49 50 private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; 51 private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000; 52 53 // Size of the input frames should be multiple of 16 for the H/W encoder. 54 private static final int REQUIRED_RESOLUTION_ALIGNMENT = 16; 55 56 /** 57 * Keeps track of the number of output buffers that have been passed down the pipeline and not yet 58 * released. We need to wait for this to go down to zero before operations invalidating the output 59 * buffers, i.e., stop() and getOutputBuffer(). 60 */ 61 private static class BusyCount { 62 private final Object countLock = new Object(); 63 private int count; 64 increment()65 public void increment() { 66 synchronized (countLock) { 67 count++; 68 } 69 } 70 71 // This method may be called on an arbitrary thread. decrement()72 public void decrement() { 73 synchronized (countLock) { 74 count--; 75 if (count == 0) { 76 countLock.notifyAll(); 77 } 78 } 79 } 80 81 // The increment and waitForZero methods are called on the same thread (deliverEncodedImage, 82 // running on the output thread). Hence, after waitForZero returns, the count will stay zero 83 // until the same thread calls increment. waitForZero()84 public void waitForZero() { 85 boolean wasInterrupted = false; 86 synchronized (countLock) { 87 while (count > 0) { 88 try { 89 countLock.wait(); 90 } catch (InterruptedException e) { 91 Logging.e(TAG, "Interrupted while waiting on busy count", e); 92 wasInterrupted = true; 93 } 94 } 95 } 96 97 if (wasInterrupted) { 98 Thread.currentThread().interrupt(); 99 } 100 } 101 } 102 // --- Initialized on construction. 103 private final MediaCodecWrapperFactory mediaCodecWrapperFactory; 104 private final String codecName; 105 private final VideoCodecMimeType codecType; 106 private final Integer surfaceColorFormat; 107 private final Integer yuvColorFormat; 108 private final YuvFormat yuvFormat; 109 private final Map<String, String> params; 110 private final int keyFrameIntervalSec; // Base interval for generating key frames. 111 // Interval at which to force a key frame. Used to reduce color distortions caused by some 112 // Qualcomm video encoders. 113 private final long forcedKeyFrameNs; 114 private final BitrateAdjuster bitrateAdjuster; 115 // EGL context shared with the application. Used to access texture inputs. 116 private final EglBase14.Context sharedContext; 117 118 // Drawer used to draw input textures onto the codec's input surface. 119 private final GlRectDrawer textureDrawer = new GlRectDrawer(); 120 private final VideoFrameDrawer videoFrameDrawer = new VideoFrameDrawer(); 121 // A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are 122 // pre-populated with all the information that can't be sent through MediaCodec. 123 private final BlockingDeque<EncodedImage.Builder> outputBuilders = new LinkedBlockingDeque<>(); 124 125 private final ThreadChecker encodeThreadChecker = new ThreadChecker(); 126 private final ThreadChecker outputThreadChecker = new ThreadChecker(); 127 private final BusyCount outputBuffersBusyCount = new BusyCount(); 128 129 // --- Set on initialize and immutable until release. 130 private Callback callback; 131 private boolean automaticResizeOn; 132 133 // --- Valid and immutable while an encoding session is running. 134 @Nullable private MediaCodecWrapper codec; 135 // Thread that delivers encoded frames to the user callback. 136 @Nullable private Thread outputThread; 137 138 // EGL base wrapping the shared texture context. Holds hooks to both the shared context and the 139 // input surface. Making this base current allows textures from the context to be drawn onto the 140 // surface. 141 @Nullable private EglBase14 textureEglBase; 142 // Input surface for the codec. The encoder will draw input textures onto this surface. 143 @Nullable private Surface textureInputSurface; 144 145 private int width; 146 private int height; 147 // Y-plane strides in the encoder's input 148 private int stride; 149 // Y-plane slice-height in the encoder's input 150 private int sliceHeight; 151 private boolean useSurfaceMode; 152 153 // --- Only accessed from the encoding thread. 154 // Presentation timestamp of next frame to encode. 155 private long nextPresentationTimestampUs; 156 // Presentation timestamp of the last requested (or forced) key frame. 157 private long lastKeyFrameNs; 158 159 // --- Only accessed on the output thread. 160 // Contents of the last observed config frame output by the MediaCodec. Used by H.264. 161 @Nullable private ByteBuffer configBuffer; 162 private int adjustedBitrate; 163 164 // Whether the encoder is running. Volatile so that the output thread can watch this value and 165 // exit when the encoder stops. 166 private volatile boolean running; 167 // Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this 168 // value to send exceptions thrown during release back to the encoder thread. 169 @Nullable private volatile Exception shutdownException; 170 171 /** 172 * Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame 173 * intervals, and bitrateAdjuster. 174 * 175 * @param codecName the hardware codec implementation to use 176 * @param codecType the type of the given video codec (eg. VP8, VP9, H264 or AV1) 177 * @param surfaceColorFormat color format for surface mode or null if not available 178 * @param yuvColorFormat color format for bytebuffer mode 179 * @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec 180 * @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested; 181 * used to reduce distortion caused by some codec implementations 182 * @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the 183 * desired bitrates 184 * @throws IllegalArgumentException if colorFormat is unsupported 185 */ HardwareVideoEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName, VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat, Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs, BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext)186 public HardwareVideoEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName, 187 VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat, 188 Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs, 189 BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) { 190 this.mediaCodecWrapperFactory = mediaCodecWrapperFactory; 191 this.codecName = codecName; 192 this.codecType = codecType; 193 this.surfaceColorFormat = surfaceColorFormat; 194 this.yuvColorFormat = yuvColorFormat; 195 this.yuvFormat = YuvFormat.valueOf(yuvColorFormat); 196 this.params = params; 197 this.keyFrameIntervalSec = keyFrameIntervalSec; 198 this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs); 199 this.bitrateAdjuster = bitrateAdjuster; 200 this.sharedContext = sharedContext; 201 202 // Allow construction on a different thread. 203 encodeThreadChecker.detachThread(); 204 } 205 206 @Override initEncode(Settings settings, Callback callback)207 public VideoCodecStatus initEncode(Settings settings, Callback callback) { 208 encodeThreadChecker.checkIsOnValidThread(); 209 210 this.callback = callback; 211 automaticResizeOn = settings.automaticResizeOn; 212 213 if (settings.width % REQUIRED_RESOLUTION_ALIGNMENT != 0 214 || settings.height % REQUIRED_RESOLUTION_ALIGNMENT != 0) { 215 Logging.e(TAG, "MediaCodec is only tested with resolutions that are 16x16 aligned."); 216 return VideoCodecStatus.ERR_SIZE; 217 } 218 this.width = settings.width; 219 this.height = settings.height; 220 useSurfaceMode = canUseSurface(); 221 222 if (settings.startBitrate != 0 && settings.maxFramerate != 0) { 223 bitrateAdjuster.setTargets(settings.startBitrate * 1000, settings.maxFramerate); 224 } 225 adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps(); 226 227 Logging.d(TAG, 228 "initEncode: " + width + " x " + height + ". @ " + settings.startBitrate 229 + "kbps. Fps: " + settings.maxFramerate + " Use surface mode: " + useSurfaceMode); 230 return initEncodeInternal(); 231 } 232 initEncodeInternal()233 private VideoCodecStatus initEncodeInternal() { 234 encodeThreadChecker.checkIsOnValidThread(); 235 236 nextPresentationTimestampUs = 0; 237 lastKeyFrameNs = -1; 238 239 try { 240 codec = mediaCodecWrapperFactory.createByCodecName(codecName); 241 } catch (IOException | IllegalArgumentException e) { 242 Logging.e(TAG, "Cannot create media encoder " + codecName); 243 return VideoCodecStatus.FALLBACK_SOFTWARE; 244 } 245 246 final int colorFormat = useSurfaceMode ? surfaceColorFormat : yuvColorFormat; 247 try { 248 MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height); 249 format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate); 250 format.setInteger(KEY_BITRATE_MODE, VIDEO_ControlRateConstant); 251 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); 252 format.setFloat( 253 MediaFormat.KEY_FRAME_RATE, (float) bitrateAdjuster.getAdjustedFramerateFps()); 254 format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec); 255 if (codecType == VideoCodecMimeType.H264) { 256 String profileLevelId = params.get(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID); 257 if (profileLevelId == null) { 258 profileLevelId = VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1; 259 } 260 switch (profileLevelId) { 261 case VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1: 262 format.setInteger("profile", VIDEO_AVC_PROFILE_HIGH); 263 format.setInteger("level", VIDEO_AVC_LEVEL_3); 264 break; 265 case VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1: 266 break; 267 default: 268 Logging.w(TAG, "Unknown profile level id: " + profileLevelId); 269 } 270 } 271 Logging.d(TAG, "Format: " + format); 272 codec.configure( 273 format, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE); 274 275 if (useSurfaceMode) { 276 textureEglBase = EglBase.createEgl14(sharedContext, EglBase.CONFIG_RECORDABLE); 277 textureInputSurface = codec.createInputSurface(); 278 textureEglBase.createSurface(textureInputSurface); 279 textureEglBase.makeCurrent(); 280 } 281 282 MediaFormat inputFormat = codec.getInputFormat(); 283 stride = getStride(inputFormat, width); 284 sliceHeight = getSliceHeight(inputFormat, height); 285 286 codec.start(); 287 } catch (IllegalStateException e) { 288 Logging.e(TAG, "initEncodeInternal failed", e); 289 release(); 290 return VideoCodecStatus.FALLBACK_SOFTWARE; 291 } 292 293 running = true; 294 outputThreadChecker.detachThread(); 295 outputThread = createOutputThread(); 296 outputThread.start(); 297 298 return VideoCodecStatus.OK; 299 } 300 301 @Override release()302 public VideoCodecStatus release() { 303 encodeThreadChecker.checkIsOnValidThread(); 304 305 final VideoCodecStatus returnValue; 306 if (outputThread == null) { 307 returnValue = VideoCodecStatus.OK; 308 } else { 309 // The outputThread actually stops and releases the codec once running is false. 310 running = false; 311 if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) { 312 Logging.e(TAG, "Media encoder release timeout"); 313 returnValue = VideoCodecStatus.TIMEOUT; 314 } else if (shutdownException != null) { 315 // Log the exception and turn it into an error. 316 Logging.e(TAG, "Media encoder release exception", shutdownException); 317 returnValue = VideoCodecStatus.ERROR; 318 } else { 319 returnValue = VideoCodecStatus.OK; 320 } 321 } 322 323 textureDrawer.release(); 324 videoFrameDrawer.release(); 325 if (textureEglBase != null) { 326 textureEglBase.release(); 327 textureEglBase = null; 328 } 329 if (textureInputSurface != null) { 330 textureInputSurface.release(); 331 textureInputSurface = null; 332 } 333 outputBuilders.clear(); 334 335 codec = null; 336 outputThread = null; 337 338 // Allow changing thread after release. 339 encodeThreadChecker.detachThread(); 340 341 return returnValue; 342 } 343 344 @Override encode(VideoFrame videoFrame, EncodeInfo encodeInfo)345 public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) { 346 encodeThreadChecker.checkIsOnValidThread(); 347 if (codec == null) { 348 return VideoCodecStatus.UNINITIALIZED; 349 } 350 351 final VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer(); 352 final boolean isTextureBuffer = videoFrameBuffer instanceof VideoFrame.TextureBuffer; 353 354 // If input resolution changed, restart the codec with the new resolution. 355 final int frameWidth = videoFrame.getBuffer().getWidth(); 356 final int frameHeight = videoFrame.getBuffer().getHeight(); 357 final boolean shouldUseSurfaceMode = canUseSurface() && isTextureBuffer; 358 if (frameWidth != width || frameHeight != height || shouldUseSurfaceMode != useSurfaceMode) { 359 VideoCodecStatus status = resetCodec(frameWidth, frameHeight, shouldUseSurfaceMode); 360 if (status != VideoCodecStatus.OK) { 361 return status; 362 } 363 } 364 365 if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) { 366 // Too many frames in the encoder. Drop this frame. 367 Logging.e(TAG, "Dropped frame, encoder queue full"); 368 return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887. 369 } 370 371 boolean requestedKeyFrame = false; 372 for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) { 373 if (frameType == EncodedImage.FrameType.VideoFrameKey) { 374 requestedKeyFrame = true; 375 } 376 } 377 378 if (requestedKeyFrame || shouldForceKeyFrame(videoFrame.getTimestampNs())) { 379 requestKeyFrame(videoFrame.getTimestampNs()); 380 } 381 382 // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are 383 // subsampled at one byte per four pixels. 384 int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2; 385 EncodedImage.Builder builder = EncodedImage.builder() 386 .setCaptureTimeNs(videoFrame.getTimestampNs()) 387 .setEncodedWidth(videoFrame.getBuffer().getWidth()) 388 .setEncodedHeight(videoFrame.getBuffer().getHeight()) 389 .setRotation(videoFrame.getRotation()); 390 outputBuilders.offer(builder); 391 392 long presentationTimestampUs = nextPresentationTimestampUs; 393 // Round frame duration down to avoid bitrate overshoot. 394 long frameDurationUs = 395 (long) (TimeUnit.SECONDS.toMicros(1) / bitrateAdjuster.getAdjustedFramerateFps()); 396 nextPresentationTimestampUs += frameDurationUs; 397 398 final VideoCodecStatus returnValue; 399 if (useSurfaceMode) { 400 returnValue = encodeTextureBuffer(videoFrame, presentationTimestampUs); 401 } else { 402 returnValue = 403 encodeByteBuffer(videoFrame, presentationTimestampUs, videoFrameBuffer, bufferSize); 404 } 405 406 // Check if the queue was successful. 407 if (returnValue != VideoCodecStatus.OK) { 408 // Keep the output builders in sync with buffers in the codec. 409 outputBuilders.pollLast(); 410 } 411 412 return returnValue; 413 } 414 encodeTextureBuffer( VideoFrame videoFrame, long presentationTimestampUs)415 private VideoCodecStatus encodeTextureBuffer( 416 VideoFrame videoFrame, long presentationTimestampUs) { 417 encodeThreadChecker.checkIsOnValidThread(); 418 try { 419 // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway, 420 // but it's a workaround for bug webrtc:5147. 421 GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 422 // It is not necessary to release this frame because it doesn't own the buffer. 423 VideoFrame derotatedFrame = 424 new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs()); 425 videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */); 426 textureEglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs)); 427 } catch (RuntimeException e) { 428 Logging.e(TAG, "encodeTexture failed", e); 429 return VideoCodecStatus.ERROR; 430 } 431 return VideoCodecStatus.OK; 432 } 433 encodeByteBuffer(VideoFrame videoFrame, long presentationTimestampUs, VideoFrame.Buffer videoFrameBuffer, int bufferSize)434 private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, long presentationTimestampUs, 435 VideoFrame.Buffer videoFrameBuffer, int bufferSize) { 436 encodeThreadChecker.checkIsOnValidThread(); 437 // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind. 438 int index; 439 try { 440 index = codec.dequeueInputBuffer(0 /* timeout */); 441 } catch (IllegalStateException e) { 442 Logging.e(TAG, "dequeueInputBuffer failed", e); 443 return VideoCodecStatus.ERROR; 444 } 445 446 if (index == -1) { 447 // Encoder is falling behind. No input buffers available. Drop the frame. 448 Logging.d(TAG, "Dropped frame, no input buffers available"); 449 return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887. 450 } 451 452 ByteBuffer buffer; 453 try { 454 buffer = codec.getInputBuffer(index); 455 } catch (IllegalStateException e) { 456 Logging.e(TAG, "getInputBuffer with index=" + index + " failed", e); 457 return VideoCodecStatus.ERROR; 458 } 459 fillInputBuffer(buffer, videoFrameBuffer); 460 461 try { 462 codec.queueInputBuffer( 463 index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */); 464 } catch (IllegalStateException e) { 465 Logging.e(TAG, "queueInputBuffer failed", e); 466 // IllegalStateException thrown when the codec is in the wrong state. 467 return VideoCodecStatus.ERROR; 468 } 469 return VideoCodecStatus.OK; 470 } 471 472 @Override setRateAllocation(BitrateAllocation bitrateAllocation, int framerate)473 public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) { 474 encodeThreadChecker.checkIsOnValidThread(); 475 if (framerate > MAX_VIDEO_FRAMERATE) { 476 framerate = MAX_VIDEO_FRAMERATE; 477 } 478 bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate); 479 return VideoCodecStatus.OK; 480 } 481 482 @Override setRates(RateControlParameters rcParameters)483 public VideoCodecStatus setRates(RateControlParameters rcParameters) { 484 encodeThreadChecker.checkIsOnValidThread(); 485 bitrateAdjuster.setTargets(rcParameters.bitrate.getSum(), rcParameters.framerateFps); 486 return VideoCodecStatus.OK; 487 } 488 489 @Override getScalingSettings()490 public ScalingSettings getScalingSettings() { 491 encodeThreadChecker.checkIsOnValidThread(); 492 if (automaticResizeOn) { 493 if (codecType == VideoCodecMimeType.VP8) { 494 final int kLowVp8QpThreshold = 29; 495 final int kHighVp8QpThreshold = 95; 496 return new ScalingSettings(kLowVp8QpThreshold, kHighVp8QpThreshold); 497 } else if (codecType == VideoCodecMimeType.H264) { 498 final int kLowH264QpThreshold = 24; 499 final int kHighH264QpThreshold = 37; 500 return new ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold); 501 } 502 } 503 return ScalingSettings.OFF; 504 } 505 506 @Override getImplementationName()507 public String getImplementationName() { 508 return codecName; 509 } 510 511 @Override getEncoderInfo()512 public EncoderInfo getEncoderInfo() { 513 // Since our MediaCodec is guaranteed to encode 16-pixel-aligned frames only, we set alignment 514 // value to be 16. Additionally, this encoder produces a single stream. So it should not require 515 // alignment for all layers. 516 return new EncoderInfo( 517 /* requestedResolutionAlignment= */ REQUIRED_RESOLUTION_ALIGNMENT, 518 /* applyAlignmentToAllSimulcastLayers= */ false); 519 } 520 resetCodec(int newWidth, int newHeight, boolean newUseSurfaceMode)521 private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseSurfaceMode) { 522 encodeThreadChecker.checkIsOnValidThread(); 523 VideoCodecStatus status = release(); 524 if (status != VideoCodecStatus.OK) { 525 return status; 526 } 527 528 if (newWidth % REQUIRED_RESOLUTION_ALIGNMENT != 0 529 || newHeight % REQUIRED_RESOLUTION_ALIGNMENT != 0) { 530 Logging.e(TAG, "MediaCodec is only tested with resolutions that are 16x16 aligned."); 531 return VideoCodecStatus.ERR_SIZE; 532 } 533 width = newWidth; 534 height = newHeight; 535 useSurfaceMode = newUseSurfaceMode; 536 return initEncodeInternal(); 537 } 538 shouldForceKeyFrame(long presentationTimestampNs)539 private boolean shouldForceKeyFrame(long presentationTimestampNs) { 540 encodeThreadChecker.checkIsOnValidThread(); 541 return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs; 542 } 543 requestKeyFrame(long presentationTimestampNs)544 private void requestKeyFrame(long presentationTimestampNs) { 545 encodeThreadChecker.checkIsOnValidThread(); 546 // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could 547 // indicate this in queueInputBuffer() below and guarantee _this_ frame 548 // be encoded as a key frame, but sadly that flag is ignored. Instead, 549 // we request a key frame "soon". 550 try { 551 Bundle b = new Bundle(); 552 b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0); 553 codec.setParameters(b); 554 } catch (IllegalStateException e) { 555 Logging.e(TAG, "requestKeyFrame failed", e); 556 return; 557 } 558 lastKeyFrameNs = presentationTimestampNs; 559 } 560 createOutputThread()561 private Thread createOutputThread() { 562 return new Thread() { 563 @Override 564 public void run() { 565 while (running) { 566 deliverEncodedImage(); 567 } 568 releaseCodecOnOutputThread(); 569 } 570 }; 571 } 572 573 // Visible for testing. 574 protected void deliverEncodedImage() { 575 outputThreadChecker.checkIsOnValidThread(); 576 try { 577 MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); 578 int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US); 579 if (index < 0) { 580 if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { 581 outputBuffersBusyCount.waitForZero(); 582 } 583 return; 584 } 585 586 ByteBuffer codecOutputBuffer = codec.getOutputBuffer(index); 587 codecOutputBuffer.position(info.offset); 588 codecOutputBuffer.limit(info.offset + info.size); 589 590 if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 591 Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size); 592 configBuffer = ByteBuffer.allocateDirect(info.size); 593 configBuffer.put(codecOutputBuffer); 594 } else { 595 bitrateAdjuster.reportEncodedFrame(info.size); 596 if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) { 597 updateBitrate(); 598 } 599 600 final boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0; 601 if (isKeyFrame) { 602 Logging.d(TAG, "Sync frame generated"); 603 } 604 605 final ByteBuffer frameBuffer; 606 if (isKeyFrame && codecType == VideoCodecMimeType.H264) { 607 Logging.d(TAG, 608 "Prepending config frame of size " + configBuffer.capacity() 609 + " to output buffer with offset " + info.offset + ", size " + info.size); 610 // For H.264 key frame prepend SPS and PPS NALs at the start. 611 frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity()); 612 configBuffer.rewind(); 613 frameBuffer.put(configBuffer); 614 frameBuffer.put(codecOutputBuffer); 615 frameBuffer.rewind(); 616 } else { 617 frameBuffer = codecOutputBuffer.slice(); 618 } 619 620 final EncodedImage.FrameType frameType = isKeyFrame 621 ? EncodedImage.FrameType.VideoFrameKey 622 : EncodedImage.FrameType.VideoFrameDelta; 623 624 outputBuffersBusyCount.increment(); 625 EncodedImage.Builder builder = outputBuilders.poll(); 626 EncodedImage encodedImage = builder 627 .setBuffer(frameBuffer, 628 () -> { 629 // This callback should not throw any exceptions since 630 // it may be called on an arbitrary thread. 631 // Check bug webrtc:11230 for more details. 632 try { 633 codec.releaseOutputBuffer(index, false); 634 } catch (Exception e) { 635 Logging.e(TAG, "releaseOutputBuffer failed", e); 636 } 637 outputBuffersBusyCount.decrement(); 638 }) 639 .setFrameType(frameType) 640 .createEncodedImage(); 641 // TODO(mellem): Set codec-specific info. 642 callback.onEncodedFrame(encodedImage, new CodecSpecificInfo()); 643 // Note that the callback may have retained the image. 644 encodedImage.release(); 645 } 646 } catch (IllegalStateException e) { 647 Logging.e(TAG, "deliverOutput failed", e); 648 } 649 } 650 651 private void releaseCodecOnOutputThread() { 652 outputThreadChecker.checkIsOnValidThread(); 653 Logging.d(TAG, "Releasing MediaCodec on output thread"); 654 outputBuffersBusyCount.waitForZero(); 655 try { 656 codec.stop(); 657 } catch (Exception e) { 658 Logging.e(TAG, "Media encoder stop failed", e); 659 } 660 try { 661 codec.release(); 662 } catch (Exception e) { 663 Logging.e(TAG, "Media encoder release failed", e); 664 // Propagate exceptions caught during release back to the main thread. 665 shutdownException = e; 666 } 667 configBuffer = null; 668 Logging.d(TAG, "Release on output thread done"); 669 } 670 671 private VideoCodecStatus updateBitrate() { 672 outputThreadChecker.checkIsOnValidThread(); 673 adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps(); 674 try { 675 Bundle params = new Bundle(); 676 params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, adjustedBitrate); 677 codec.setParameters(params); 678 return VideoCodecStatus.OK; 679 } catch (IllegalStateException e) { 680 Logging.e(TAG, "updateBitrate failed", e); 681 return VideoCodecStatus.ERROR; 682 } 683 } 684 685 private boolean canUseSurface() { 686 return sharedContext != null && surfaceColorFormat != null; 687 } 688 689 private static int getStride(MediaFormat inputFormat, int width) { 690 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && inputFormat != null 691 && inputFormat.containsKey(MediaFormat.KEY_STRIDE)) { 692 return inputFormat.getInteger(MediaFormat.KEY_STRIDE); 693 } 694 return width; 695 } 696 697 private static int getSliceHeight(MediaFormat inputFormat, int height) { 698 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && inputFormat != null 699 && inputFormat.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) { 700 return inputFormat.getInteger(MediaFormat.KEY_SLICE_HEIGHT); 701 } 702 return height; 703 } 704 705 // Visible for testing. 706 protected void fillInputBuffer(ByteBuffer buffer, VideoFrame.Buffer videoFrameBuffer) { 707 yuvFormat.fillBuffer(buffer, videoFrameBuffer, stride, sliceHeight); 708 } 709 710 /** 711 * Enumeration of supported YUV color formats used for MediaCodec's input. 712 */ 713 private enum YuvFormat { 714 I420 { 715 @Override 716 void fillBuffer( 717 ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY) { 718 /* 719 * According to the docs in Android MediaCodec, the stride of the U and V planes can be 720 * calculated based on the color format, though it is generally undefined and depends on the 721 * device and release. 722 * <p/> Assuming the width and height, dstStrideY and dstSliceHeightY are 723 * even, it works fine when we define the stride and slice-height of the dst U/V plane to be 724 * half of the dst Y plane. 725 */ 726 int dstStrideU = dstStrideY / 2; 727 int dstSliceHeight = dstSliceHeightY / 2; 728 VideoFrame.I420Buffer i420 = srcBuffer.toI420(); 729 YuvHelper.I420Copy(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(), 730 i420.getDataV(), i420.getStrideV(), dstBuffer, i420.getWidth(), i420.getHeight(), 731 dstStrideY, dstSliceHeightY, dstStrideU, dstSliceHeight); 732 i420.release(); 733 } 734 }, 735 NV12 { 736 @Override 737 void fillBuffer( 738 ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY) { 739 VideoFrame.I420Buffer i420 = srcBuffer.toI420(); 740 YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(), 741 i420.getDataV(), i420.getStrideV(), dstBuffer, i420.getWidth(), i420.getHeight(), 742 dstStrideY, dstSliceHeightY); 743 i420.release(); 744 } 745 }; 746 747 abstract void fillBuffer( 748 ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer, int dstStrideY, int dstSliceHeightY); 749 750 static YuvFormat valueOf(int colorFormat) { 751 switch (colorFormat) { 752 case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: 753 return I420; 754 case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: 755 case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar: 756 case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: 757 return NV12; 758 default: 759 throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat); 760 } 761 } 762 } 763 } 764