/*
 *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import android.graphics.Matrix;
import android.opengl.GLES11Ext;
import android.util.Log;
import androidx.annotation.Nullable;
import androidx.test.filters.SmallTest;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

/**
 * Instrumentation tests for {@link HardwareVideoEncoder}. The suite is parameterized over the
 * input-buffer kind (I420 vs. texture) and whether an EGL context is shared with the encoder
 * factory; see {@link #parameters()} for the exercised combinations.
 */
@RunWith(Parameterized.class)
public class HardwareVideoEncoderTest {
  @Parameters(name = "textures={0};eglContext={1}")
  public static Collection<Object[]> parameters() {
    // Texture input without an EGL context is still valid (the encoder converts via toI420),
    // but I420 input with an EGL context is not a meaningful combination, so it is omitted.
    return Arrays.asList(new Object[] {/*textures=*/false, /*eglContext=*/false},
        new Object[] {/*textures=*/true, /*eglContext=*/false},
        new Object[] {/*textures=*/true, /*eglContext=*/true});
  }

  private final boolean useTextures;
  private final boolean useEglContext;

  public HardwareVideoEncoderTest(boolean useTextures, boolean useEglContext) {
    this.useTextures = useTextures;
    this.useEglContext = useEglContext;
  }

  static final String TAG = "HwVideoEncoderTest";

  private static final boolean ENABLE_INTEL_VP8_ENCODER = true;
  private static final boolean ENABLE_H264_HIGH_PROFILE = true;
  private static final VideoEncoder.Settings SETTINGS =
      new VideoEncoder.Settings(1 /* core */, 640 /* width */, 480 /* height */, 300 /* kbps */,
          30 /* fps */, 1 /* numberOfSimulcastStreams */, true /* automaticResizeOn */,
          /* capabilities= */ new VideoEncoder.Capabilities(false /* lossNotification */));
  private static final int ENCODE_TIMEOUT_MS = 1000;
  private static final int NUM_TEST_FRAMES = 10;
  private static final int NUM_ENCODE_TRIES = 100;
  private static final int ENCODE_RETRY_SLEEP_MS = 1;
  private static final int PIXEL_ALIGNMENT_REQUIRED = 16;
  private static final boolean APPLY_ALIGNMENT_TO_ALL_SIMULCAST_LAYERS = false;

  // # Mock classes
  /**
   * Mock encoder callback that allows easy verification of the general properties of the encoded
   * frame such as width and height. Also used from AndroidVideoDecoderInstrumentationTest.
   */
  static class MockEncoderCallback implements VideoEncoder.Callback {
    private BlockingQueue<EncodedImage> frameQueue = new LinkedBlockingQueue<>();

    @Override
    public void onEncodedFrame(EncodedImage frame, VideoEncoder.CodecSpecificInfo info) {
      assertNotNull(frame);
      assertNotNull(info);

      // Make a copy because keeping a reference to the buffer is not allowed.
      final ByteBuffer bufferCopy = ByteBuffer.allocateDirect(frame.buffer.remaining());
      bufferCopy.put(frame.buffer);
      bufferCopy.rewind();

      frameQueue.offer(EncodedImage.builder()
                           .setBuffer(bufferCopy, null)
                           .setEncodedWidth(frame.encodedWidth)
                           .setEncodedHeight(frame.encodedHeight)
                           .setCaptureTimeNs(frame.captureTimeNs)
                           .setFrameType(frame.frameType)
                           .setRotation(frame.rotation)
                           .setQp(frame.qp)
                           .createEncodedImage());
    }

    /** Blocks until the next encoded frame arrives, failing the test on timeout. */
    public EncodedImage poll() {
      try {
        EncodedImage image = frameQueue.poll(ENCODE_TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertNotNull("Timed out waiting for the frame to be encoded.", image);
        return image;
      } catch (InterruptedException e) {
        throw new RuntimeException(e);
      }
    }

    /** Asserts that the next encoded image matches `frame`'s size, timestamp and rotation. */
    public void assertFrameEncoded(VideoFrame frame) {
      final VideoFrame.Buffer buffer = frame.getBuffer();
      final EncodedImage image = poll();
      assertTrue(image.buffer.capacity() > 0);
      assertEquals(image.encodedWidth, buffer.getWidth());
      assertEquals(image.encodedHeight, buffer.getHeight());
      assertEquals(image.captureTimeNs, frame.getTimestampNs());
      assertEquals(image.rotation, frame.getRotation());
    }
  }

  /** A common base class for the texture and I420 buffer that implements reference counting. */
  private abstract static class MockBufferBase implements VideoFrame.Buffer {
    protected final int width;
    protected final int height;
    // Invoked exactly once, when the reference count drops to zero.
    private final Runnable releaseCallback;
    private final Object refCountLock = new Object();
    private int refCount = 1;

    public MockBufferBase(int width, int height, Runnable releaseCallback) {
      this.width = width;
      this.height = height;
      this.releaseCallback = releaseCallback;
    }

    @Override
    public int getWidth() {
      return width;
    }

    @Override
    public int getHeight() {
      return height;
    }

    @Override
    public void retain() {
      synchronized (refCountLock) {
        assertTrue("Buffer retained after being destroyed.", refCount > 0);
        ++refCount;
      }
    }

    @Override
    public void release() {
      synchronized (refCountLock) {
        assertTrue("Buffer released too many times.", --refCount >= 0);
        if (refCount == 0) {
          releaseCallback.run();
        }
      }
    }
  }

  /** Reference-counted OES texture buffer backed by a fake texture id. */
  private static class MockTextureBuffer
      extends MockBufferBase implements VideoFrame.TextureBuffer {
    private final int textureId;

    public MockTextureBuffer(int textureId, int width, int height, Runnable releaseCallback) {
      super(width, height, releaseCallback);
      this.textureId = textureId;
    }

    @Override
    public VideoFrame.TextureBuffer.Type getType() {
      return VideoFrame.TextureBuffer.Type.OES;
    }

    @Override
    public int getTextureId() {
      return textureId;
    }

    @Override
    public Matrix getTransformMatrix() {
      return new Matrix();
    }

    @Override
    public VideoFrame.I420Buffer toI420() {
      return JavaI420Buffer.allocate(width, height);
    }

    @Override
    public VideoFrame.Buffer cropAndScale(
        int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
      // The scaled buffer shares this buffer's lifetime: retain now, release when it is released.
      retain();
      return new MockTextureBuffer(textureId, scaleWidth, scaleHeight, this::release);
    }
  }

  /** Reference-counted I420 buffer delegating pixel storage to a real {@link JavaI420Buffer}. */
  private static class MockI420Buffer extends MockBufferBase implements VideoFrame.I420Buffer {
    private final JavaI420Buffer realBuffer;

    public MockI420Buffer(int width, int height, Runnable releaseCallback) {
      super(width, height, releaseCallback);
      realBuffer = JavaI420Buffer.allocate(width, height);
    }

    @Override
    public ByteBuffer getDataY() {
      return realBuffer.getDataY();
    }

    @Override
    public ByteBuffer getDataU() {
      return realBuffer.getDataU();
    }

    @Override
    public ByteBuffer getDataV() {
      return realBuffer.getDataV();
    }

    @Override
    public int getStrideY() {
      return realBuffer.getStrideY();
    }

    @Override
    public int getStrideU() {
      return realBuffer.getStrideU();
    }

    @Override
    public int getStrideV() {
      return realBuffer.getStrideV();
    }

    @Override
    public VideoFrame.I420Buffer toI420() {
      retain();
      return this;
    }

    @Override
    public void retain() {
      // Keep the mock's count and the real buffer's count in lock-step so the test's
      // leak detection and JavaI420Buffer's own accounting agree.
      super.retain();
      realBuffer.retain();
    }

    @Override
    public void release() {
      super.release();
      realBuffer.release();
    }

    @Override
    public VideoFrame.Buffer cropAndScale(
        int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
      return realBuffer.cropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
    }
  }

  // # Test fields
  private final Object referencedFramesLock = new Object();
  // Number of generated frames whose buffers have not yet been fully released.
  private int referencedFrames;

  private Runnable releaseFrameCallback = new Runnable() {
    @Override
    public void run() {
      synchronized (referencedFramesLock) {
        --referencedFrames;
      }
    }
  };

  private EglBase14 eglBase;
  private long lastTimestampNs;

  // # Helper methods
  private VideoEncoderFactory createEncoderFactory(EglBase.Context eglContext) {
    return new HardwareVideoEncoderFactory(
        eglContext, ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
  }

  /** Creates an encoder for the first supported codec; null if the device supports none. */
  private @Nullable VideoEncoder createEncoder() {
    VideoEncoderFactory factory =
        createEncoderFactory(useEglContext ? eglBase.getEglBaseContext() : null);
    VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
    return factory.createEncoder(supportedCodecs[0]);
  }

  private VideoFrame generateI420Frame(int width, int height) {
    synchronized (referencedFramesLock) {
      ++referencedFrames;
    }
    // Advance the capture timestamp by one frame interval at the configured framerate.
    lastTimestampNs += TimeUnit.SECONDS.toNanos(1) / SETTINGS.maxFramerate;
    VideoFrame.Buffer buffer = new MockI420Buffer(width, height, releaseFrameCallback);
    return new VideoFrame(buffer, 0 /* rotation */, lastTimestampNs);
  }

  private VideoFrame generateTextureFrame(int width, int height) {
    synchronized (referencedFramesLock) {
      ++referencedFrames;
    }
    final int textureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
    lastTimestampNs += TimeUnit.SECONDS.toNanos(1) / SETTINGS.maxFramerate;
    VideoFrame.Buffer buffer =
        new MockTextureBuffer(textureId, width, height, releaseFrameCallback);
    return new VideoFrame(buffer, 0 /* rotation */, lastTimestampNs);
  }

  /** Generates a frame of the buffer kind selected by the test parameters. */
  private VideoFrame generateFrame(int width, int height) {
    return useTextures ? generateTextureFrame(width, height) : generateI420Frame(width, height);
  }

  /**
   * Feeds `frame` to `encoder`, retrying on NO_OUTPUT until the encoder accepts it.
   *
   * @return OK or ERR_SIZE; any other status fails the test.
   */
  static VideoCodecStatus testEncodeFrame(
      VideoEncoder encoder, VideoFrame frame, VideoEncoder.EncodeInfo info) {
    int numTries = 0;

    // It takes a while for the encoder to become ready so try until it accepts the frame.
    while (true) {
      ++numTries;

      final VideoCodecStatus returnValue = encoder.encode(frame, info);
      switch (returnValue) {
        case OK: // Success
          // Fall through
        case ERR_SIZE: // Wrong size
          return returnValue;
        case NO_OUTPUT:
          if (numTries >= NUM_ENCODE_TRIES) {
            fail("encoder.encode keeps returning NO_OUTPUT");
          }
          try {
            Thread.sleep(ENCODE_RETRY_SLEEP_MS); // Try again.
          } catch (InterruptedException e) {
            throw new RuntimeException(e);
          }
          break;
        default:
          fail("encoder.encode returned: " + returnValue); // Error
      }
    }
  }

  /** Rounds `number` down to the greatest multiple of `alignment` not exceeding it. */
  private static int getAlignedNumber(int number, int alignment) {
    return (number / alignment) * alignment;
  }

  public static int getPixelAlignmentRequired() {
    return PIXEL_ALIGNMENT_REQUIRED;
  }

  // # Tests
  @Before
  public void setUp() {
    NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);

    eglBase = EglBase.createEgl14(EglBase.CONFIG_PLAIN);
    eglBase.createDummyPbufferSurface();
    eglBase.makeCurrent();
    lastTimestampNs = System.nanoTime();
  }

  @After
  public void tearDown() {
    eglBase.release();
    // Every buffer created by generateFrame must have been released by the test body.
    synchronized (referencedFramesLock) {
      assertEquals("All frames were not released", 0, referencedFrames);
    }
  }

  @Test
  @SmallTest
  public void testInitialize() {
    VideoEncoder encoder = createEncoder();
    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
    assertEquals(VideoCodecStatus.OK, encoder.release());
  }

  @Test
  @SmallTest
  public void testEncode() {
    VideoEncoder encoder = createEncoder();
    MockEncoderCallback callback = new MockEncoderCallback();
    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));

    for (int i = 0; i < NUM_TEST_FRAMES; i++) {
      Log.d(TAG, "Test frame: " + i);
      VideoFrame frame = generateFrame(SETTINGS.width, SETTINGS.height);
      VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
          new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
      testEncodeFrame(encoder, frame, info);

      callback.assertFrameEncoded(frame);
      frame.release();
    }

    assertEquals(VideoCodecStatus.OK, encoder.release());
  }

  @Test
  @SmallTest
  public void testEncodeAlternatingBuffers() {
    VideoEncoder encoder = createEncoder();
    MockEncoderCallback callback = new MockEncoderCallback();
    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));

    // Alternate texture and I420 input on every iteration to exercise both input paths
    // of the same encoder instance.
    for (int i = 0; i < NUM_TEST_FRAMES; i++) {
      Log.d(TAG, "Test frame: " + i);
      VideoFrame frame;
      VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
          new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});

      frame = generateTextureFrame(SETTINGS.width, SETTINGS.height);
      testEncodeFrame(encoder, frame, info);
      callback.assertFrameEncoded(frame);
      frame.release();

      frame = generateI420Frame(SETTINGS.width, SETTINGS.height);
      testEncodeFrame(encoder, frame, info);
      callback.assertFrameEncoded(frame);
      frame.release();
    }

    assertEquals(VideoCodecStatus.OK, encoder.release());
  }

  @Test
  @SmallTest
  public void testEncodeDifferentSizes() {
    VideoEncoder encoder = createEncoder();
    MockEncoderCallback callback = new MockEncoderCallback();
    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));

    VideoFrame frame;
    VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
        new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});

    frame = generateFrame(SETTINGS.width / 2, SETTINGS.height / 2);
    testEncodeFrame(encoder, frame, info);
    callback.assertFrameEncoded(frame);
    frame.release();

    frame = generateFrame(SETTINGS.width, SETTINGS.height);
    testEncodeFrame(encoder, frame, info);
    callback.assertFrameEncoded(frame);
    frame.release();

    // Android MediaCodec only guarantees of proper operation with 16-pixel-aligned input frame.
    // Force the size of input frame with the greatest multiple of 16 below the original size.
    frame = generateFrame(getAlignedNumber(SETTINGS.width / 4, PIXEL_ALIGNMENT_REQUIRED),
        getAlignedNumber(SETTINGS.height / 4, PIXEL_ALIGNMENT_REQUIRED));
    testEncodeFrame(encoder, frame, info);
    callback.assertFrameEncoded(frame);
    frame.release();

    assertEquals(VideoCodecStatus.OK, encoder.release());
  }

  @Test
  @SmallTest
  public void testEncodeAlignmentCheck() {
    VideoEncoder encoder = createEncoder();
    MockEncoderCallback callback = new MockEncoderCallback();
    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));

    VideoFrame frame;
    VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
        new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});

    frame = generateFrame(SETTINGS.width / 2, SETTINGS.height / 2);
    assertEquals(VideoCodecStatus.OK, testEncodeFrame(encoder, frame, info));
    frame.release();

    // Android MediaCodec only guarantees of proper operation with 16-pixel-aligned input frame.
    // Following input frame with non-aligned size would return ERR_SIZE.
    frame = generateFrame(SETTINGS.width / 4, SETTINGS.height / 4);
    assertNotEquals(VideoCodecStatus.OK, testEncodeFrame(encoder, frame, info));
    frame.release();

    // Since our encoder has returned with an error, we reinitialize the encoder.
    assertEquals(VideoCodecStatus.OK, encoder.release());
    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));

    frame = generateFrame(getAlignedNumber(SETTINGS.width / 4, PIXEL_ALIGNMENT_REQUIRED),
        getAlignedNumber(SETTINGS.height / 4, PIXEL_ALIGNMENT_REQUIRED));
    assertEquals(VideoCodecStatus.OK, testEncodeFrame(encoder, frame, info));
    frame.release();

    assertEquals(VideoCodecStatus.OK, encoder.release());
  }

  @Test
  @SmallTest
  public void testGetEncoderInfo() {
    VideoEncoder encoder = createEncoder();
    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
    VideoEncoder.EncoderInfo info = encoder.getEncoderInfo();
    assertEquals(PIXEL_ALIGNMENT_REQUIRED, info.getRequestedResolutionAlignment());
    assertEquals(
        APPLY_ALIGNMENT_TO_ALL_SIMULCAST_LAYERS, info.getApplyAlignmentToAllSimulcastLayers());
    assertEquals(VideoCodecStatus.OK, encoder.release());
  }
}