/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.codec.cts;

import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import android.app.Presentation;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.graphics.SurfaceTexture;
import android.graphics.drawable.ColorDrawable;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.cts.CompositionTextureView;
import android.media.cts.InputSurface;
import android.media.cts.OutputSurface;
import android.media.cts.R;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.os.Parcel;
import android.util.Log;
import android.util.Size;
import android.view.Display;
import android.view.Surface;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TableLayout;
import android.widget.TableRow;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

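/*
 * Test flow overview (summarizing the code below): a Presentation draws solid colors or a
 * quadrant pattern into a VirtualDisplay whose Surface is backed by a SurfaceTexture owned by
 * GlCompositor. The compositor renders that texture into the encoder's input Surface, encoded
 * output is fed straight into a decoder rendering onto an OutputSurface, and the decoded frames
 * are verified with glReadPixels.
 */
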
/**
 * Implementation class for tests using MediaCodec encoding with composition of multiple virtual
 * displays.
 */
public class EncodeVirtualDisplayWithCompositionTestImpl {
    private static final String TAG = "EncodeVirtualDisplayWithCompositionTestImpl";
    private static final boolean DBG = false;
    private static final String MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;

    private static final long DEFAULT_WAIT_TIMEOUT_MS = 10000; // 10 seconds
    private static final long DEQUEUE_TIMEOUT_US = 3000000; // 3 seconds

    private static final int COLOR_RED = makeColor(100, 0, 0);
    private static final int COLOR_GREEN = makeColor(0, 100, 0);
    private static final int COLOR_BLUE = makeColor(0, 0, 100);
    private static final int COLOR_GREY = makeColor(100, 100, 100);

    public static final int BITRATE_1080p = 20000000;
    public static final int BITRATE_720p = 14000000;
    public static final int BITRATE_800x480 = 14000000;
    public static final int BITRATE_DEFAULT = 10000000;

    private static final int IFRAME_INTERVAL = 10;

    private static final int MAX_NUM_WINDOWS = 3;

    private static Handler sHandlerForRunOnMain = new Handler(Looper.getMainLooper());

    private Surface mEncodingSurface;
    private OutputSurface mDecodingSurface;
    private volatile boolean mCodecConfigReceived = false;
    private volatile boolean mCodecBufferReceived = false;
    private EncodingHelper mEncodingHelper;
    private MediaCodec mDecoder;
    private final ByteBuffer mPixelBuf = ByteBuffer.allocateDirect(4);
    private volatile boolean mIsQuitting = false;
    private Throwable mTestException;
    private VirtualDisplayPresentation mLocalPresentation;
    private RemoteVirtualDisplayPresentation mRemotePresentation;
    private ByteBuffer[] mDecoderInputBuffers;

    /** Event listener for tests that do not verify the encoder output. */
    private EncoderEventListener mEncoderEventListener = new EncoderEventListener() {
        @Override
        public void onCodecConfig(ByteBuffer data, MediaCodec.BufferInfo info) {
            mCodecConfigReceived = true;
        }
        @Override
        public void onBufferReady(ByteBuffer data, MediaCodec.BufferInfo info) {
            mCodecBufferReceived = true;
        }
    };

    /* TEST_COLORS static initialization; need ARGB for ColorDrawable */
    private static int makeColor(int red, int green, int blue) {
        return 0xff << 24 | (red & 0xff) << 16 | (green & 0xff) << 8 | (blue & 0xff);
    }

    /**
     * Run rendering test in a separate thread. This is necessary as {@link OutputSurface} requires
     * constructing it in a non-test thread.
     * @param w width of the encoded video
     * @param h height of the encoded video
     * @throws Exception
     */
    public void runTestRenderingInSeparateThread(final Context context, final String mimeType,
            final int w, final int h, final boolean runRemotely, final boolean multipleWindows)
            throws Throwable {
        runTestRenderingInSeparateThread(
                context, mimeType, w, h, runRemotely, multipleWindows, /* degrees */ 0, null);
    }

    public void runTestRenderingInSeparateThread(final Context context, final String mimeType,
            final int w, final int h, final boolean runRemotely, final boolean multipleWindows,
            final int degrees, final String decoderName) throws Throwable {
        mTestException = null;
        Thread renderingThread = new Thread(new Runnable() {
            public void run() {
                try {
                    doTestRenderingOutput(
                            context, mimeType, w, h, runRemotely, multipleWindows,
                            degrees, decoderName);
                } catch (Throwable t) {
                    t.printStackTrace();
                    mTestException = t;
                }
            }
        });
        renderingThread.start();
        renderingThread.join(60000);
        assertTrue(!renderingThread.isAlive());
        if (mTestException != null) {
            throw mTestException;
        }
    }

    private void doTestRenderingOutput(final Context context, String mimeType, int w, int h,
            boolean runRemotely, boolean multipleWindows, int degrees,
            String decoderName) throws Throwable {
        if (DBG) {
            Log.i(TAG, "doTestRenderingOutput for type:" + mimeType + " w:" + w + " h:" + h);
        }
        try {
            mIsQuitting = false;
            if (decoderName == null) {
                mDecoder = MediaCodec.createDecoderByType(mimeType);
            } else {
                mDecoder = MediaCodec.createByCodecName(decoderName);
            }
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(mimeType, w, h);
            decoderFormat.setInteger(
                    MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
            decoderFormat.setInteger(
                    MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT601_PAL);
            decoderFormat.setInteger(
                    MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
            if (degrees != 0) {
                decoderFormat.setInteger(MediaFormat.KEY_ROTATION, degrees);
            }
            mDecodingSurface = new OutputSurface(w, h);
            mDecoder.configure(decoderFormat, mDecodingSurface.getSurface(), null, 0);
            // only scale to fit scaling mode is supported
            mDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
            mDecoder.start();
            mDecoderInputBuffers = mDecoder.getInputBuffers();

            mEncodingHelper = new EncodingHelper();
            mEncodingSurface = mEncodingHelper.startEncoding(mimeType, w, h,
                    new EncoderEventListener() {
                        @Override
                        public void onCodecConfig(ByteBuffer data, BufferInfo info) {
                            if (DBG) {
                                Log.i(TAG, "onCodecConfig l:" + info.size);
                            }
                            handleEncodedData(data, info);
                        }

                        @Override
                        public void onBufferReady(ByteBuffer data, BufferInfo info) {
                            if (DBG) {
                                Log.i(TAG, "onBufferReady l:" + info.size);
                            }
                            handleEncodedData(data, info);
                        }

                        private void handleEncodedData(ByteBuffer data, BufferInfo info) {
                            if (mIsQuitting) {
                                if (DBG) {
                                    Log.i(TAG, "ignore data as test is quitting");
                                }
                                return;
                            }
                            int inputBufferIndex = mDecoder.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
                            if (inputBufferIndex < 0) {
                                if (DBG) {
                                    Log.i(TAG, "dequeueInputBuffer returned:" + inputBufferIndex);
                                }
                                return;
                            }
                            assertTrue(inputBufferIndex >= 0);
                            ByteBuffer inputBuffer = mDecoderInputBuffers[inputBufferIndex];
                            inputBuffer.clear();
                            inputBuffer.put(data);
                            mDecoder.queueInputBuffer(inputBufferIndex, 0, info.size,
                                    info.presentationTimeUs, info.flags);
                        }
                    });
            GlCompositor compositor = new GlCompositor(context);
            if (DBG) {
                Log.i(TAG, "start composition");
            }
            compositor.startComposition(mEncodingSurface, w, h, multipleWindows ? 3 : 1);

            if (DBG) {
                Log.i(TAG, "create display");
            }

            Renderer renderer = null;
            Surface windowSurface = compositor.getWindowSurface(multipleWindows ? 1 : 0);
            if (runRemotely) {
                mRemotePresentation =
                        new RemoteVirtualDisplayPresentation(context, windowSurface, w, h);
                mRemotePresentation.connect();
                mRemotePresentation.start();
                renderer = mRemotePresentation;
            } else {
                mLocalPresentation = (degrees == 0)
                        ? new VirtualDisplayPresentation(context, windowSurface, w, h)
                        : new RotateVirtualDisplayPresentation(context, windowSurface, w, h);
                mLocalPresentation.createVirtualDisplay();
                mLocalPresentation.createPresentation();
                renderer = mLocalPresentation;
            }

            if (DBG) {
                Log.i(TAG, "start rendering and check");
            }
            if (degrees == 0) {
                renderColorAndCheckResult(renderer, w, h, COLOR_RED);
                renderColorAndCheckResult(renderer, w, h, COLOR_BLUE);
                renderColorAndCheckResult(renderer, w, h, COLOR_GREEN);
                renderColorAndCheckResult(renderer, w, h, COLOR_GREY);
            } else {
                renderRotationAndCheckResult(renderer, w, h, degrees);
            }

            mIsQuitting = true;
            if (runRemotely) {
                mRemotePresentation.disconnect();
            } else {
                mLocalPresentation.dismissPresentation();
                mLocalPresentation.destroyVirtualDisplay();
            }

            compositor.stopComposition();
        } finally {
            if (mEncodingHelper != null) {
                mEncodingHelper.stopEncoding();
                mEncodingHelper = null;
            }
            if (mDecoder != null) {
                mDecoder.stop();
                mDecoder.release();
                mDecoder = null;
            }
            if (mDecodingSurface != null) {
                mDecodingSurface.release();
                mDecodingSurface = null;
            }
        }
    }

    private static final int NUM_MAX_RETRY = 120;
    private static final int IMAGE_WAIT_TIMEOUT_MS = 1000;

    private void renderColorAndCheckResult(Renderer renderer, int w, int h,
            int color) throws Exception {
        BufferInfo info = new BufferInfo();
        for (int i = 0; i < NUM_MAX_RETRY; i++) {
            renderer.doRendering(color);
            int bufferIndex = mDecoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
            if (DBG) {
                Log.i(TAG, "decoder dequeueOutputBuffer returned " + bufferIndex);
            }
            if (bufferIndex < 0) {
                continue;
            }
            mDecoder.releaseOutputBuffer(bufferIndex, true);
            if (mDecodingSurface.checkForNewImage(IMAGE_WAIT_TIMEOUT_MS)) {
                mDecodingSurface.drawImage();
                if (checkSurfaceFrameColor(w, h, color)) {
                    Log.i(TAG, "color " + Integer.toHexString(color) + " matched");
                    return;
                }
            } else if (DBG) {
                Log.i(TAG, "no rendering yet");
            }
        }
        fail("Color did not match");
    }

    private void renderRotationAndCheckResult(Renderer renderer, int w, int h,
            int degrees) throws Exception {
        BufferInfo info = new BufferInfo();
        for (int i = 0; i < NUM_MAX_RETRY; i++) {
            renderer.doRendering(-1);
            int bufferIndex = mDecoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
            if (DBG) {
                Log.i(TAG, "decoder dequeueOutputBuffer returned " + bufferIndex);
            }
            if (bufferIndex < 0) {
                continue;
            }
            mDecoder.releaseOutputBuffer(bufferIndex, true);
            if (mDecodingSurface.checkForNewImage(IMAGE_WAIT_TIMEOUT_MS)) {
                mDecodingSurface.drawImage();
                if (checkRotatedFrameQuadrants(w, h, degrees)) {
                    Log.i(TAG, "output rotated " + degrees + " degrees");
                    return;
                }
            } else if (DBG) {
                Log.i(TAG, "no rendering yet");
            }
        }
        fail("Frame not properly rotated");
    }

    private boolean checkRotatedFrameQuadrants(int w, int h, int degrees) {
        // Read a pixel from each quadrant of the surface.
        int ww = w / 4;
        int hh = h / 4;
        // coords is ordered counter clockwise (note, gl 0,0 is bottom left)
        int[][] coords = new int[][] {{ww, hh}, {ww * 3, hh}, {ww * 3, hh * 3}, {ww, hh * 3}};
        List<Integer> expected = new ArrayList<>();
        List<Integer> colors = Arrays.asList(
                new Integer[] {COLOR_GREEN, COLOR_BLUE, COLOR_RED, COLOR_GREY});
        expected.addAll(colors);
        expected.addAll(colors);
        int offset = (degrees / 90) % 4;
        for (int i = 0; i < coords.length; i++) {
            int[] c = coords[i];
            int x = c[0];
            int y = c[1];
            GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
            int r = mPixelBuf.get(0) & 0xff;
            int g = mPixelBuf.get(1) & 0xff;
            int b = mPixelBuf.get(2) & 0xff;
            // adding the offset to rotate expected colors clockwise
            int color = expected.get(offset + i);
            int redExpected = (color >> 16) & 0xff;
            int greenExpected = (color >> 8) & 0xff;
            int blueExpected = color & 0xff;
            Log.i(TAG, String.format("(%d,%d) expecting %d,%d,%d saw %d,%d,%d",
                    x, y, redExpected, greenExpected, blueExpected, r, g, b));
            if (!approxEquals(redExpected, r) || !approxEquals(greenExpected, g)
                    || !approxEquals(blueExpected, b)) {
                return false;
            }
        }
        return true;
    }

    private boolean checkSurfaceFrameColor(int w, int h, int color) {
        // Read a pixel from the center of the surface. Might want to read from multiple points
        // and average them together.
        int x = w / 2;
        int y = h / 2;
        GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
        int r = mPixelBuf.get(0) & 0xff;
        int g = mPixelBuf.get(1) & 0xff;
        int b = mPixelBuf.get(2) & 0xff;

        int redExpected = (color >> 16) & 0xff;
        int greenExpected = (color >> 8) & 0xff;
        int blueExpected = color & 0xff;
        if (approxEquals(redExpected, r) && approxEquals(greenExpected, g)
                && approxEquals(blueExpected, b)) {
            return true;
        }
        Log.i(TAG, "expected 0x" + Integer.toHexString(color) + " got 0x"
                + Integer.toHexString(makeColor(r, g, b)));
        return false;
    }

    /**
     * Determines if two color values are approximately equal.
     */
    private static boolean approxEquals(int expected, int actual) {
        // allow differences between BT.601 and BT.709 conversions during encoding/decoding for now
        final int MAX_DELTA = 17;
        return Math.abs(expected - actual) <= MAX_DELTA;
    }

    private static final int NUM_CODEC_CREATION = 5;
    private static final int NUM_DISPLAY_CREATION = 10;
    private static final int NUM_RENDERING = 10;

    public void doTestVirtualDisplayRecycles(final Context context, int numDisplays)
            throws Exception {
        Size maxSize = getMaxSupportedEncoderSize();
        if (maxSize == null) {
            Log.i(TAG, "no codec found, skipping");
            return;
        }
        VirtualDisplayPresentation[] virtualDisplays = new VirtualDisplayPresentation[numDisplays];
        for (int i = 0; i < NUM_CODEC_CREATION; i++) {
            mCodecConfigReceived = false;
            mCodecBufferReceived = false;
            if (DBG) {
                Log.i(TAG, "start encoding");
            }
            EncodingHelper encodingHelper = new EncodingHelper();
            try {
                mEncodingSurface = encodingHelper.startEncoding(
                        MIME_TYPE, maxSize.getWidth(), maxSize.getHeight(), mEncoderEventListener);
                GlCompositor compositor = new GlCompositor(context);
                if (DBG) {
                    Log.i(TAG, "start composition");
                }
                compositor.startComposition(mEncodingSurface,
                        maxSize.getWidth(), maxSize.getHeight(), numDisplays);
                for (int j = 0; j < NUM_DISPLAY_CREATION; j++) {
                    if (DBG) {
                        Log.i(TAG, "create display");
                    }
                    for (int k = 0; k < numDisplays; k++) {
                        virtualDisplays[k] =
                                new VirtualDisplayPresentation(context,
                                        compositor.getWindowSurface(k),
                                        maxSize.getWidth() / numDisplays, maxSize.getHeight());
                        virtualDisplays[k].createVirtualDisplay();
                        virtualDisplays[k].createPresentation();
                    }
                    if (DBG) {
                        Log.i(TAG, "start rendering");
                    }
                    for (int k = 0; k < NUM_RENDERING; k++) {
                        for (int l = 0; l < numDisplays; l++) {
                            virtualDisplays[l].doRendering(COLOR_RED);
                        }
                        // do not care how many frames are actually rendered.
                        Thread.sleep(1);
                    }
                    for (int k = 0; k < numDisplays; k++) {
                        virtualDisplays[k].dismissPresentation();
                        virtualDisplays[k].destroyVirtualDisplay();
                    }
                    compositor.recreateWindows();
                }
                if (DBG) {
                    Log.i(TAG, "stop composition");
                }
                compositor.stopComposition();
            } finally {
                if (DBG) {
                    Log.i(TAG, "stop encoding");
                }
                encodingHelper.stopEncoding();
                assertTrue(mCodecConfigReceived);
                assertTrue(mCodecBufferReceived);
            }
        }
    }

    interface EncoderEventListener {
        public void onCodecConfig(ByteBuffer data, MediaCodec.BufferInfo info);
        public void onBufferReady(ByteBuffer data, MediaCodec.BufferInfo info);
    }

    private class EncodingHelper {
        private MediaCodec mEncoder;
        private volatile boolean mStopEncoding = false;
        private EncoderEventListener mEventListener;
        private String mMimeType;
        private int mW;
        private int mH;
        private Thread mEncodingThread;
        private Surface mEncodingSurface;
        private Semaphore mInitCompleted = new Semaphore(0);
        private Exception mEncodingError;

        Surface startEncoding(String mimeType, int w, int h, EncoderEventListener eventListener) {
            mStopEncoding = false;
            mMimeType = mimeType;
            mW = w;
            mH = h;
            mEventListener = eventListener;
            mEncodingError = null;
            mEncodingThread = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        doEncoding();
                    } catch (Exception e) {
                        e.printStackTrace();
                        // Throwing the exception here will crash the thread and subsequently the
                        // entire test process. We save it here and throw later in stopEncoding().
                        mEncodingError = e;
                    }
                }
            });
            mEncodingThread.start();
            try {
                if (DBG) {
                    Log.i(TAG, "wait for encoder init");
                }
                mInitCompleted.acquire();
                if (DBG) {
                    Log.i(TAG, "wait for encoder done");
                }
            } catch (InterruptedException e) {
                fail("should not happen");
            }
            return mEncodingSurface;
        }

        void stopEncoding() throws Exception {
            try {
                mStopEncoding = true;
                mEncodingThread.join();
            } catch (InterruptedException e) {
                // just ignore
            } finally {
                mEncodingThread = null;
            }
            // Throw here if any error occurred in the encoding thread.
            if (mEncodingError != null) {
                throw mEncodingError;
            }
        }

        private void doEncoding() throws Exception {
            final int TIMEOUT_USEC_NORMAL = 1000000;
            MediaFormat format = MediaFormat.createVideoFormat(mMimeType, mW, mH);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            int bitRate = BITRATE_DEFAULT;
            if (mW == 1920 && mH == 1080) {
                bitRate = BITRATE_1080p;
            } else if (mW == 1280 && mH == 720) {
                bitRate = BITRATE_720p;
            } else if (mW == 800 && mH == 480) {
                bitRate = BITRATE_800x480;
            }
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
            format.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT601_PAL);
            format.setInteger(MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_SDR_VIDEO);

            MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
            String codecName = null;
            if ((codecName = mcl.findEncoderForFormat(format)) == null) {
                throw new RuntimeException(
                        "encoder for " + mMimeType + " does not support format: " + format);
            }

            try {
                mEncoder = MediaCodec.createByCodecName(codecName);
                mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                mEncodingSurface = mEncoder.createInputSurface();
                mEncoder.start();
                mInitCompleted.release();
                if (DBG) {
                    Log.i(TAG, "starting encoder");
                }
                ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
                MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                while (!mStopEncoding) {
                    int index = mEncoder.dequeueOutputBuffer(info, TIMEOUT_USEC_NORMAL);
                    if (DBG) {
                        Log.i(TAG, "encoder dequeueOutputBuffer returned " + index);
                    }
                    if (index >= 0) {
                        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            Log.i(TAG, "codec config data");
                            ByteBuffer encodedData = encoderOutputBuffers[index];
                            encodedData.position(info.offset);
                            encodedData.limit(info.offset + info.size);
                            mEventListener.onCodecConfig(encodedData, info);
                            mEncoder.releaseOutputBuffer(index, false);
                        } else if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            Log.i(TAG, "EOS, stopping encoding");
                            break;
                        } else {
                            ByteBuffer encodedData = encoderOutputBuffers[index];
                            encodedData.position(info.offset);
                            encodedData.limit(info.offset + info.size);
                            mEventListener.onBufferReady(encodedData, info);
                            mEncoder.releaseOutputBuffer(index, false);
                        }
                    } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        Log.i(TAG, "output buffer changed");
                        encoderOutputBuffers = mEncoder.getOutputBuffers();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                throw e;
            } finally {
                if (mEncoder != null) {
                    mEncoder.stop();
                    mEncoder.release();
                    mEncoder = null;
                }
                if (mEncodingSurface != null) {
                    mEncodingSurface.release();
                    mEncodingSurface = null;
                }
            }
        }
    }

    /**
     * Handles composition of multiple SurfaceTextures into a single Surface.
     */
    private static class GlCompositor implements SurfaceTexture.OnFrameAvailableListener {
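        // Composition runs on a dedicated thread with its own Looper: startComposition() starts
        // CompositionRunnable, which initializes the GL state and then services DO_RENDERING and
        // DO_RECREATE_WINDOWS messages posted from frame-available callbacks and
        // recreateWindows().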
        private final Context mContext;
        private Surface mSurface;
        private int mWidth;
        private int mHeight;
        private volatile int mNumWindows;
        private GlWindow mTopWindow;
        private Thread mCompositionThread;
        private Semaphore mStartCompletionSemaphore;
        private Semaphore mRecreationCompletionSemaphore;
        private Looper mLooper;
        private Handler mHandler;
        private InputSurface mEglHelper;
        private int mGlProgramId = 0;
        private int mGluMVPMatrixHandle;
        private int mGluSTMatrixHandle;
        private int mGlaPositionHandle;
        private int mGlaTextureHandle;
        private float[] mMVPMatrix = new float[16];
        private TopWindowVirtualDisplayPresentation mTopPresentation;

        private static final String VERTEX_SHADER =
                "uniform mat4 uMVPMatrix;\n" +
                "uniform mat4 uSTMatrix;\n" +
                "attribute vec4 aPosition;\n" +
                "attribute vec4 aTextureCoord;\n" +
                "varying vec2 vTextureCoord;\n" +
                "void main() {\n" +
                "  gl_Position = uMVPMatrix * aPosition;\n" +
                "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                "}\n";

        private static final String FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;\n" +
                "varying vec2 vTextureCoord;\n" +
                "uniform samplerExternalOES sTexture;\n" +
                "void main() {\n" +
                "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
                "}\n";

        public GlCompositor(Context context) {
            mContext = context;
        }

        void startComposition(Surface surface, int w, int h, int numWindows) throws Exception {
            mSurface = surface;
            mWidth = w;
            mHeight = h;
            mNumWindows = numWindows;
            mCompositionThread = new Thread(new CompositionRunnable());
            mStartCompletionSemaphore = new Semaphore(0);
            mCompositionThread.start();
            waitForStartCompletion();
        }

        void stopComposition() {
            try {
                if (mLooper != null) {
                    mLooper.quit();
                    mCompositionThread.join();
                }
            } catch (InterruptedException e) {
                // don't care
            }
            mCompositionThread = null;
            mSurface = null;
            mStartCompletionSemaphore = null;
        }

        Surface getWindowSurface(int windowIndex) {
            return mTopPresentation.getSurface(windowIndex);
        }

        void recreateWindows() throws Exception {
            mRecreationCompletionSemaphore = new Semaphore(0);
            Message msg = mHandler.obtainMessage(CompositionHandler.DO_RECREATE_WINDOWS);
            mHandler.sendMessage(msg);
            if (!mRecreationCompletionSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS,
                    TimeUnit.MILLISECONDS)) {
                fail("recreation timeout");
            }
            mTopPresentation.waitForSurfaceReady(DEFAULT_WAIT_TIMEOUT_MS);
        }

        @Override
        public void onFrameAvailable(SurfaceTexture surface) {
            if (DBG) {
                Log.i(TAG, "onFrameAvailable " + surface);
            }
            GlWindow w = mTopWindow;
            if (w != null) {
                w.markTextureUpdated();
                requestUpdate();
            } else {
                Log.w(TAG, "top window gone");
            }
        }

        private void requestUpdate() {
            Thread compositionThread = mCompositionThread;
            if (compositionThread == null || !compositionThread.isAlive()) {
                return;
            }
            Message msg = mHandler.obtainMessage(CompositionHandler.DO_RENDERING);
            mHandler.sendMessage(msg);
        }

        private int loadShader(int shaderType, String source) throws GlException {
            int shader = GLES20.glCreateShader(shaderType);
            checkGlError("glCreateShader type=" + shaderType);
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
            return shader;
        }

        private int createProgram(String vertexSource, String fragmentSource) throws GlException {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            if (pixelShader == 0) {
                return 0;
            }

            int program = GLES20.glCreateProgram();
            checkGlError("glCreateProgram");
            if (program == 0) {
                Log.e(TAG, "Could not create program");
            }
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
            return program;
        }

        private void initGl() throws GlException {
            mEglHelper = new InputSurface(mSurface);
            mEglHelper.makeCurrent();
            mGlProgramId = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
            mGlaPositionHandle = GLES20.glGetAttribLocation(mGlProgramId, "aPosition");
            checkGlError("glGetAttribLocation aPosition");
            if (mGlaPositionHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aPosition");
            }
            mGlaTextureHandle = GLES20.glGetAttribLocation(mGlProgramId, "aTextureCoord");
            checkGlError("glGetAttribLocation aTextureCoord");
            if (mGlaTextureHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aTextureCoord");
            }
            mGluMVPMatrixHandle = GLES20.glGetUniformLocation(mGlProgramId, "uMVPMatrix");
            checkGlError("glGetUniformLocation uMVPMatrix");
            if (mGluMVPMatrixHandle == -1) {
                throw new RuntimeException("Could not get uniform location for uMVPMatrix");
            }
            mGluSTMatrixHandle = GLES20.glGetUniformLocation(mGlProgramId, "uSTMatrix");
            checkGlError("glGetUniformLocation uSTMatrix");
            if (mGluSTMatrixHandle == -1) {
                throw new RuntimeException("Could not get uniform location for uSTMatrix");
            }
            Matrix.setIdentityM(mMVPMatrix, 0);
            Log.i(TAG, "initGl w:" + mWidth + " h:" + mHeight);
            GLES20.glViewport(0, 0, mWidth, mHeight);
            float[] vMatrix = new float[16];
            float[] projMatrix = new float[16];
            // max window is from (0,0) to (mWidth - 1, mHeight - 1)
            float wMid = mWidth / 2f;
            float hMid = mHeight / 2f;
            // look from positive z to hide windows in lower z
            Matrix.setLookAtM(vMatrix, 0, wMid, hMid, 5f, wMid, hMid, 0f, 0f, 1.0f, 0.0f);
            Matrix.orthoM(projMatrix, 0, -wMid, wMid, -hMid, hMid, 1, 10);
            Matrix.multiplyMM(mMVPMatrix, 0, projMatrix, 0, vMatrix, 0);
            createWindows();
        }

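        // The "top window" is a single full-size GlWindow; its SurfaceTexture is fed by a
        // TopWindowVirtualDisplayPresentation, which in turn hosts the CompositionTextureView
        // windows handed out through getWindowSurface().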
        private void createWindows() throws GlException {
            mTopWindow = new GlWindow(this, 0, 0, mWidth, mHeight);
            mTopWindow.init();
            mTopPresentation = new TopWindowVirtualDisplayPresentation(mContext,
                    mTopWindow.getSurface(), mWidth, mHeight, mNumWindows);
            mTopPresentation.createVirtualDisplay();
            mTopPresentation.createPresentation();
            ((TopWindowPresentation) mTopPresentation.getPresentation()).populateWindows();
        }

        private void cleanupGl() {
            if (mTopPresentation != null) {
                mTopPresentation.dismissPresentation();
                mTopPresentation.destroyVirtualDisplay();
                mTopPresentation = null;
            }
            if (mTopWindow != null) {
                mTopWindow.cleanup();
                mTopWindow = null;
            }
            if (mEglHelper != null) {
                mEglHelper.release();
                mEglHelper = null;
            }
        }

        private void doGlRendering() throws GlException {
            if (DBG) {
                Log.i(TAG, "doGlRendering");
            }
            mTopWindow.updateTexImageIfNecessary();
            GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

            GLES20.glUseProgram(mGlProgramId);
            GLES20.glUniformMatrix4fv(mGluMVPMatrixHandle, 1, false, mMVPMatrix, 0);
            mTopWindow.onDraw(mGluSTMatrixHandle, mGlaPositionHandle, mGlaTextureHandle);
            checkGlError("window draw");
            if (DBG) {
                final IntBuffer pixels = IntBuffer.allocate(1);
                GLES20.glReadPixels(mWidth / 2, mHeight / 2, 1, 1,
                        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
                Log.i(TAG, "glReadPixels returned 0x" + Integer.toHexString(pixels.get(0)));
            }
            mEglHelper.swapBuffers();
        }

        private void doRecreateWindows() throws GlException {
            mTopPresentation.dismissPresentation();
            mTopPresentation.destroyVirtualDisplay();
            mTopWindow.cleanup();
            createWindows();
            mRecreationCompletionSemaphore.release();
        }

        private void waitForStartCompletion() throws Exception {
            if (!mStartCompletionSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS,
                    TimeUnit.MILLISECONDS)) {
                fail("start timeout");
            }
            mStartCompletionSemaphore = null;
            mTopPresentation.waitForSurfaceReady(DEFAULT_WAIT_TIMEOUT_MS);
        }

        private class CompositionRunnable implements Runnable {
            @Override
            public void run() {
                try {
                    Looper.prepare();
                    mLooper = Looper.myLooper();
                    mHandler = new CompositionHandler();
                    initGl();
                    // init done
                    mStartCompletionSemaphore.release();
                    Looper.loop();
                } catch (GlException e) {
                    e.printStackTrace();
                    fail("got gl exception");
                } finally {
                    cleanupGl();
                    mHandler = null;
                    mLooper = null;
                }
            }
        }

        private class CompositionHandler extends Handler {
            private static final int DO_RENDERING = 1;
            private static final int DO_RECREATE_WINDOWS = 2;

            @Override
            public void handleMessage(Message msg) {
                try {
                    switch (msg.what) {
                        case DO_RENDERING: {
                            doGlRendering();
                        } break;
                        case DO_RECREATE_WINDOWS: {
                            doRecreateWindows();
                        } break;
                    }
                } catch (GlException e) {
                    // ignore as this can happen during tearing down
                }
            }
        }

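        // GlWindow wraps one external OES texture plus a SurfaceTexture/Surface pair and a quad
        // (triangle strip) used to draw the latest frame at the window's position.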
        private class GlWindow {
            private static final int FLOAT_SIZE_BYTES = 4;
            private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
            private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
            private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
            private int mBlX;
            private int mBlY;
            private int mWidth;
            private int mHeight;
            private int mTextureId = 0; // 0 is invalid
            private volatile SurfaceTexture mSurfaceTexture;
            private volatile Surface mSurface;
            private FloatBuffer mVerticesData;
            private float[] mSTMatrix = new float[16];
            private AtomicInteger mNumTextureUpdated = new AtomicInteger(0);
            private GlCompositor mCompositor;

            /**
             * @param blX X coordinate of bottom-left point of window
             * @param blY Y coordinate of bottom-left point of window
             * @param w window width
             * @param h window height
             */
            public GlWindow(GlCompositor compositor, int blX, int blY, int w, int h) {
                mCompositor = compositor;
                mBlX = blX;
                mBlY = blY;
                mWidth = w;
                mHeight = h;
                int trX = blX + w;
                int trY = blY + h;
                float[] vertices = new float[] {
                        // x, y, z, u, v
                        mBlX, mBlY, 0, 0, 0,
                        trX, mBlY, 0, 1, 0,
                        mBlX, trY, 0, 0, 1,
                        trX, trY, 0, 1, 1
                };
                Log.i(TAG, "create window " + this + " blX:" + mBlX + " blY:" + mBlY + " trX:" +
                        trX + " trY:" + trY);
                mVerticesData = ByteBuffer.allocateDirect(
                        vertices.length * FLOAT_SIZE_BYTES)
                        .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mVerticesData.put(vertices).position(0);
            }

            /**
             * Initialize the window for composition. The counterpart is {@link #cleanup()}.
             * @throws GlException
             */
            public void init() throws GlException {
                int[] textures = new int[1];
                GLES20.glGenTextures(1, textures, 0);

                mTextureId = textures[0];
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
                checkGlError("glBindTexture mTextureID");

                GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                        GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                        GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                        GLES20.GL_CLAMP_TO_EDGE);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                        GLES20.GL_CLAMP_TO_EDGE);
                checkGlError("glTexParameter");
                mSurfaceTexture = new SurfaceTexture(mTextureId);
                mSurfaceTexture.setDefaultBufferSize(mWidth, mHeight);
                mSurface = new Surface(mSurfaceTexture);
                mSurfaceTexture.setOnFrameAvailableListener(mCompositor);
            }

            public void cleanup() {
                mNumTextureUpdated.set(0);
                if (mTextureId != 0) {
                    int[] textures = new int[] {
                            mTextureId
                    };
                    GLES20.glDeleteTextures(1, textures, 0);
                }
                GLES20.glFinish();
                if (mSurface != null) {
                    mSurface.release();
                    mSurface = null;
                }
                if (mSurfaceTexture != null) {
                    mSurfaceTexture.release();
                    mSurfaceTexture = null;
                }
            }

            /**
             * Mark the texture as updated so that it is refreshed in the next rendering pass.
             */
            public void markTextureUpdated() {
                mNumTextureUpdated.incrementAndGet();
            }

            /**
             * Update the texture for rendering if a new frame has arrived.
             */
            public void updateTexImageIfNecessary() {
                int numTextureUpdated = mNumTextureUpdated.getAndDecrement();
                if (numTextureUpdated > 0) {
                    if (DBG) {
                        Log.i(TAG, "updateTexImageIfNecessary " + this);
                    }
                    mSurfaceTexture.updateTexImage();
                    mSurfaceTexture.getTransformMatrix(mSTMatrix);
                }
                if (numTextureUpdated < 0) {
                    fail("should not happen");
                }
            }

            /**
             * Draw the window. It will not be drawn at all if the window is not visible.
             * @param uSTMatrixHandle shader handle for the STMatrix used for texture coordinate
             *        mapping
             * @param aPositionHandle shader handle for vertex position
             * @param aTextureHandle shader handle for texture
             */
            public void onDraw(int uSTMatrixHandle, int aPositionHandle, int aTextureHandle) {
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
                mVerticesData.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
                GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mVerticesData);
                GLES20.glEnableVertexAttribArray(aPositionHandle);

                mVerticesData.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
                GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mVerticesData);
                GLES20.glEnableVertexAttribArray(aTextureHandle);
                GLES20.glUniformMatrix4fv(uSTMatrixHandle, 1, false, mSTMatrix, 0);
                GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            }

            public SurfaceTexture getSurfaceTexture() {
                return mSurfaceTexture;
            }

            public Surface getSurface() {
                return mSurface;
            }
        }
    }

    static void checkGlError(String op) throws GlException {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new GlException(op + ": glError " + error);
        }
    }

    public static class GlException extends Exception {
        public GlException(String msg) {
            super(msg);
        }
    }

    private interface Renderer {
        void doRendering(final int color) throws Exception;
    }

    private static class RotateVirtualDisplayPresentation extends VirtualDisplayPresentation {

        RotateVirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            super(context, surface, w, h);
        }

        @Override
        protected TestPresentationBase doCreatePresentation() {
            return new TestRotatePresentation(mContext, mVirtualDisplay.getDisplay());
        }

    }

    private static class VirtualDisplayPresentation implements Renderer {
        protected final Context mContext;
        protected final Surface mSurface;
        protected final int mWidth;
        protected final int mHeight;
        protected VirtualDisplay mVirtualDisplay;
        protected TestPresentationBase mPresentation;
        private final DisplayManager mDisplayManager;

        VirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            mContext = context;
            mSurface = surface;
            mWidth = w;
            mHeight = h;
            mDisplayManager = (DisplayManager) context.getSystemService(Context.DISPLAY_SERVICE);
        }

        void createVirtualDisplay() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mVirtualDisplay = mDisplayManager.createVirtualDisplay(
                            TAG, mWidth, mHeight, 200, mSurface,
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY |
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION);
                }
            });
        }

        void destroyVirtualDisplay() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mVirtualDisplay.release();
                }
            });
        }

        void createPresentation() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation = doCreatePresentation();
                    mPresentation.show();
                }
            });
        }

        protected TestPresentationBase doCreatePresentation() {
            return new TestPresentation(mContext, mVirtualDisplay.getDisplay());
        }

        TestPresentationBase getPresentation() {
            return mPresentation;
        }

        void dismissPresentation() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation.dismiss();
                }
            });
        }

        @Override
        public void doRendering(final int color) throws Exception {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation.doRendering(color);
                }
            });
        }
    }

    private static class TestPresentationBase extends Presentation {

        public TestPresentationBase(Context outerContext, Display display) {
            // This theme is required to prevent an extra view from obscuring the presentation
            super(outerContext, display,
                    android.R.style.Theme_Holo_Light_NoActionBar_TranslucentDecor);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_LOCAL_FOCUS_MODE);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED);
        }

        public void doRendering(int color) {
            // to be implemented by child
        }
    }

    private static class TestPresentation extends TestPresentationBase {
        private ImageView mImageView;

        public TestPresentation(Context outerContext, Display display) {
            super(outerContext, display);
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            mImageView = new ImageView(getContext());
            mImageView.setImageDrawable(new ColorDrawable(COLOR_RED));
            mImageView.setLayoutParams(new LayoutParams(
                    LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
            setContentView(mImageView);
        }

        public void doRendering(int color) {
            if (DBG) {
                Log.i(TAG, "doRendering " + Integer.toHexString(color));
            }
            mImageView.setImageDrawable(new ColorDrawable(color));
        }
    }

    private static class TestRotatePresentation extends TestPresentationBase {
        static final int[] kColors = new int[] {COLOR_GREY, COLOR_RED, COLOR_GREEN, COLOR_BLUE};
        private final ImageView[] mQuadrants = new ImageView[4];

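        // The four quadrants are laid out in a 2x2 table in kColors order (grey, red, green,
        // blue); checkRotatedFrameQuadrants() samples one pixel per quadrant and rotates the
        // expected colors by the requested number of 90-degree steps.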
        public TestRotatePresentation(Context outerContext, Display display) {
            super(outerContext, display);
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            Context ctx = getContext();
            TableLayout table = new TableLayout(ctx);
            ViewGroup.LayoutParams fill = new ViewGroup.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
            TableLayout.LayoutParams fillTable = new TableLayout.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT, 1f);
            TableRow.LayoutParams fillRow = new TableRow.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT, 1f);
            table.setLayoutParams(fill);
            table.setStretchAllColumns(true);
            TableRow[] rows = new TableRow[] {new TableRow(ctx), new TableRow(ctx)};
            for (int i = 0; i < mQuadrants.length; i++) {
                mQuadrants[i] = new ImageView(ctx);
                mQuadrants[i].setImageDrawable(new ColorDrawable(kColors[i]));
                rows[i / 2].addView(mQuadrants[i], fillRow);
            }
            for (TableRow row : rows) {
                table.addView(row, fillTable);
            }
            setContentView(table);
            Log.v(TAG, "setContentView(table)");
        }

        @Override
        public void doRendering(int color) {
            Log.v(TAG, "doRendering: ignoring color: " + Integer.toHexString(color));
            for (int i = 0; i < mQuadrants.length; i++) {
                mQuadrants[i].setImageDrawable(new ColorDrawable(kColors[i]));
            }
        }

    }

    private static class TopWindowPresentation extends TestPresentationBase {
        private FrameLayout[] mWindowsLayout = new FrameLayout[MAX_NUM_WINDOWS];
        private CompositionTextureView[] mWindows = new CompositionTextureView[MAX_NUM_WINDOWS];
        private final int mNumWindows;
        private final Semaphore mWindowWaitSemaphore = new Semaphore(0);

        public TopWindowPresentation(int numWindows, Context outerContext, Display display) {
            super(outerContext, display);
            mNumWindows = numWindows;
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            if (DBG) {
                Log.i(TAG, "TopWindowPresentation onCreate, numWindows " + mNumWindows);
            }
            setContentView(R.layout.composition_layout);
            mWindowsLayout[0] = (FrameLayout) findViewById(R.id.window0);
            mWindowsLayout[1] = (FrameLayout) findViewById(R.id.window1);
            mWindowsLayout[2] = (FrameLayout) findViewById(R.id.window2);
        }

        public void populateWindows() {
            runOnMain(new Runnable() {
                public void run() {
                    for (int i = 0; i < mNumWindows; i++) {
                        mWindows[i] = new CompositionTextureView(getContext());
                        mWindows[i].setLayoutParams(new ViewGroup.LayoutParams(
                                ViewGroup.LayoutParams.MATCH_PARENT,
                                ViewGroup.LayoutParams.MATCH_PARENT));
                        mWindowsLayout[i].setVisibility(View.VISIBLE);
                        mWindowsLayout[i].addView(mWindows[i]);
                        mWindows[i].startListening();
                    }
                    mWindowWaitSemaphore.release();
                }
            });
        }

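        // Waits first for populateWindows() to finish on the main thread, then for each
        // CompositionTextureView to obtain its Surface.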
        public void waitForSurfaceReady(long timeoutMs) throws Exception {
            mWindowWaitSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
            for (int i = 0; i < mNumWindows; i++) {
                if (!mWindows[i].waitForSurfaceReady(timeoutMs)) {
                    fail("surface wait timeout");
                }
            }
        }

        public Surface getSurface(int windowIndex) {
            Surface surface = mWindows[windowIndex].getSurface();
            assertNotNull(surface);
            return surface;
        }
    }

    private static class TopWindowVirtualDisplayPresentation extends VirtualDisplayPresentation {
        private final int mNumWindows;

        TopWindowVirtualDisplayPresentation(Context context, Surface surface, int w, int h,
                int numWindows) {
            super(context, surface, w, h);
            assertNotNull(surface);
            mNumWindows = numWindows;
        }

        void waitForSurfaceReady(long timeoutMs) throws Exception {
            ((TopWindowPresentation) mPresentation).waitForSurfaceReady(timeoutMs);
        }

        Surface getSurface(int windowIndex) {
            return ((TopWindowPresentation) mPresentation).getSurface(windowIndex);
        }

        @Override
        protected TestPresentationBase doCreatePresentation() {
            return new TopWindowPresentation(mNumWindows, mContext, mVirtualDisplay.getDisplay());
        }
    }

    private static class RemoteVirtualDisplayPresentation implements Renderer {
        /** argument: Surface, int w, int h, return none */
        private static final int BINDER_CMD_START = IBinder.FIRST_CALL_TRANSACTION;
        /** argument: int color, return none */
        private static final int BINDER_CMD_RENDER = IBinder.FIRST_CALL_TRANSACTION + 1;

        private final Context mContext;
        private final Surface mSurface;
        private final int mWidth;
        private final int mHeight;

        private IBinder mService;
        private final Semaphore mConnectionWait = new Semaphore(0);
        private final ServiceConnection mConnection = new ServiceConnection() {

            public void onServiceConnected(ComponentName arg0, IBinder arg1) {
                mService = arg1;
                mConnectionWait.release();
            }

            public void onServiceDisconnected(ComponentName arg0) {
                // ignore
            }

        };

        RemoteVirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            mContext = context;
            mSurface = surface;
            mWidth = w;
            mHeight = h;
        }

        void connect() throws Exception {
            Intent intent = new Intent();
            intent.setClassName("android.media.codec.cts",
                    "android.media.codec.cts.RemoteVirtualDisplayService");
            mContext.bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
            if (!mConnectionWait.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
                fail("cannot bind to service");
            }
        }

        void disconnect() {
            mContext.unbindService(mConnection);
        }

        void start() throws Exception {
            Parcel parcel = Parcel.obtain();
            mSurface.writeToParcel(parcel, 0);
            parcel.writeInt(mWidth);
            parcel.writeInt(mHeight);
            mService.transact(BINDER_CMD_START, parcel, null, 0);
        }

        @Override
        public void doRendering(int color) throws Exception {
            Parcel parcel = Parcel.obtain();
            parcel.writeInt(color);
            mService.transact(BINDER_CMD_RENDER, parcel, null, 0);
        }
    }

    private static Size getMaxSupportedEncoderSize() {
        final Size[] standardSizes = new Size[] {
                new Size(1920, 1080),
                new Size(1280, 720),
                new Size(720, 480),
                new Size(352, 576)
        };

        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (Size sz : standardSizes) {
            MediaFormat format = MediaFormat.createVideoFormat(
                    MIME_TYPE, sz.getWidth(), sz.getHeight());
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            int bitRate = BITRATE_DEFAULT;
            if (sz.getWidth() == 1920 && sz.getHeight() == 1080) {
                bitRate = BITRATE_1080p;
            } else if (sz.getWidth() == 1280 && sz.getHeight() == 720) {
                bitRate = BITRATE_720p;
            } else if (sz.getWidth() == 800 && sz.getHeight() == 480) {
                bitRate = BITRATE_800x480;
            }
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            Log.i(TAG, "format = " + format.toString());
            if (mcl.findEncoderForFormat(format) != null) {
                return sz;
            }
        }
        return null;
    }

    /**
     * Checks the maximum concurrent encoding / decoding resolution allowed.
     * Some hardware cannot handle its maximum reported encoder resolution while a decoder is
     * running at the same time.
     * The check is done at four levels: 1080p, 720p, 800x480 and 480p
     * (the last one is required by CDD).
     */
    public Size checkMaxConcurrentEncodingDecodingResolution() {
        if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 1920, 1080, BITRATE_1080p)) {
            return new Size(1920, 1080);
        } else if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 1280, 720, BITRATE_720p)) {
            return new Size(1280, 720);
        } else if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 800, 480, BITRATE_800x480)) {
            return new Size(800, 480);
        } else if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 720, 480, BITRATE_DEFAULT)) {
            return new Size(720, 480);
        }
        Log.i(TAG, "SKIPPING test: concurrent encoding and decoding is not supported");
        return null;
    }

    public boolean isConcurrentEncodingDecodingSupported(
            String mimeType, int w, int h, int bitRate) {
        return isConcurrentEncodingDecodingSupported(mimeType, w, h, bitRate, null);
    }

    public boolean isConcurrentEncodingDecodingSupported(
            String mimeType, int w, int h, int bitRate, String decoderName) {
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        MediaFormat testFormat = MediaFormat.createVideoFormat(mimeType, w, h);
        testFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        testFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        if (mcl.findDecoderForFormat(testFormat) == null
                || mcl.findEncoderForFormat(testFormat) == null) {
            return false;
        }

        MediaCodec decoder = null;
        OutputSurface decodingSurface = null;
        MediaCodec encoder = null;
        Surface encodingSurface = null;
        try {
            if (decoderName == null) {
                decoder = MediaCodec.createDecoderByType(mimeType);
            } else {
                decoder = MediaCodec.createByCodecName(decoderName);
            }
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(mimeType, w, h);
            decodingSurface = new OutputSurface(w, h);
            decodingSurface.makeCurrent();
            decoder.configure(decoderFormat, decodingSurface.getSurface(), null, 0);
            decoder.start();

            MediaFormat format = MediaFormat.createVideoFormat(mimeType, w, h);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            encoder = MediaCodec.createEncoderByType(mimeType);
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            encodingSurface = encoder.createInputSurface();
            encoder.start();

            encoder.stop();
            decoder.stop();
        } catch (Exception e) {
            e.printStackTrace();
            Log.i(TAG, "This H/W does not support w:" + w + " h:" + h);
            return false;
        } finally {
            if (encodingSurface != null) {
                encodingSurface.release();
            }
            if (encoder != null) {
                encoder.release();
            }
            if (decoder != null) {
                decoder.release();
            }
            if (decodingSurface != null) {
                decodingSurface.release();
            }
        }
        return true;
    }

    private static void runOnMain(Runnable runner) {
        sHandlerForRunOnMain.post(runner);
    }

    private static void runOnMainSync(Runnable runner) {
        SyncRunnable sr = new SyncRunnable(runner);
        sHandlerForRunOnMain.post(sr);
        sr.waitForComplete();
    }

    private static final class SyncRunnable implements Runnable {
        private final Runnable mTarget;
        private boolean mComplete;

        public SyncRunnable(Runnable target) {
            mTarget = target;
        }

        public void run() {
            mTarget.run();
            synchronized (this) {
                mComplete = true;
                notifyAll();
            }
        }

        public void waitForComplete() {
            synchronized (this) {
                while (!mComplete) {
                    try {
                        wait();
                    } catch (InterruptedException e) {
                        // ignore
                    }
                }
            }
        }
    }
}