/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import android.app.Presentation;
import android.content.Context;
import android.graphics.drawable.ColorDrawable;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.GLES20;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.test.AndroidTestCase;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.widget.ImageView;

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Tests connecting a virtual display to the input of a MediaCodec encoder.
 * <p>
 * Other test cases exercise these independently in more depth.  The goal here is to make sure
 * that virtual displays and MediaCodec can be used together.
 * <p>
 * We can't control frame-by-frame what appears on the virtual display, because we're
 * just throwing a Presentation and a View at it.  Further, it's possible that frames
 * will be dropped if they arrive faster than they are consumed, so any given frame
 * may not appear at all.  We can't wait for a series of actions to complete by watching
 * the output, because the frames are going directly to the encoder, and the encoder may
 * collect a number of frames before producing output.
 * <p>
 * The test puts up a series of colored screens, expecting to see all of them, and in order.
 * Any black screens that appear before or after are ignored.
 */
public class EncodeVirtualDisplayTest extends AndroidTestCase {
    private static final String TAG = "EncodeVirtualTest";
    private static final boolean VERBOSE = false;           // lots of logging
    private static final boolean DEBUG_SAVE_FILE = false;   // save copy of encoded movie
    private static final String DEBUG_FILE_NAME_BASE = "/sdcard/test.";

    // Encoder parameters table, sorted by encoder level from high to low.
    private static final int[][] ENCODER_PARAM_TABLE = {
        // encoder level,                             width, height, bitrate,  framerate
        {MediaCodecInfo.CodecProfileLevel.AVCLevel31, 1280,  720,    14000000, 30},
        {MediaCodecInfo.CodecProfileLevel.AVCLevel3,  720,   480,    10000000, 30},
        {MediaCodecInfo.CodecProfileLevel.AVCLevel22, 720,   480,    4000000,  15},
        {MediaCodecInfo.CodecProfileLevel.AVCLevel21, 352,   576,    4000000,  25},
    };

    // Virtual display characteristics.  Scaled down from full display size because not all
    // devices can encode at the resolution of their own display.  Defaults come from the
    // lowest-level entry in ENCODER_PARAM_TABLE; setupEncoderParameters() may raise them.
    private static final String NAME = TAG;
    private static int sWidth = ENCODER_PARAM_TABLE[ENCODER_PARAM_TABLE.length - 1][1];
    private static int sHeight = ENCODER_PARAM_TABLE[ENCODER_PARAM_TABLE.length - 1][2];
    private static final int DENSITY = DisplayMetrics.DENSITY_HIGH;
    private static final int UI_TIMEOUT_MS = 2000;
    private static final int UI_RENDER_PAUSE_MS = 400;

    // Encoder parameters.  We use the same width/height as the virtual display.
    private static final String MIME_TYPE = "video/avc";
    private static int sFrameRate = 15;                     // 15fps
    private static final int IFRAME_INTERVAL = 10;          // 10 seconds between I-frames
    private static int sBitRate = 6000000;                  // 6Mbps

    // Colors to test (RGB).  These must convert cleanly to and from BT.601 YUV.
    private static final int TEST_COLORS[] = {
        makeColor(10, 100, 200),        // YCbCr 89,186,82
        makeColor(100, 200, 10),        // YCbCr 144,60,98
        makeColor(200, 10, 100),        // YCbCr 203,10,103
        makeColor(10, 200, 100),        // YCbCr 130,113,52
        makeColor(100, 10, 200),        // YCbCr 67,199,154
        makeColor(200, 100, 10),        // YCbCr 119,74,179
    };

    // Single RGBA pixel read back from the EGL surface by checkSurfaceFrame().
    private final ByteBuffer mPixelBuf = ByteBuffer.allocateDirect(4);
    private Handler mUiHandler;                             // Handler on main Looper
    private DisplayManager mDisplayManager;
    volatile boolean mInputDone;

    /* TEST_COLORS static initialization; need ARGB for ColorDrawable */
    private static int makeColor(int red, int green, int blue) {
        return 0xff << 24 | (red & 0xff) << 16 | (green & 0xff) << 8 | (blue & 0xff);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();

        mUiHandler = new Handler(Looper.getMainLooper());
        mDisplayManager = (DisplayManager)mContext.getSystemService(Context.DISPLAY_SERVICE);
        setupEncoderParameters();
    }

    /**
     * Basic test.
     *
     * @throws Exception
     */
    public void testEncodeVirtualDisplay() throws Throwable {
        EncodeVirtualWrapper.runTest(this);
    }

    /**
     * Wraps encodeVirtualTest, running it in a new thread.  Required because of the way
     * SurfaceTexture.OnFrameAvailableListener works when the current thread has a Looper
     * configured.
     */
    private static class EncodeVirtualWrapper implements Runnable {
        private Throwable mThrowable;
        private EncodeVirtualDisplayTest mTest;

        private EncodeVirtualWrapper(EncodeVirtualDisplayTest test) {
            mTest = test;
        }

        @Override
        public void run() {
            try {
                mTest.encodeVirtualDisplayTest();
            } catch (Throwable th) {
                mThrowable = th;
            }
        }

        /** Entry point.  Runs the test body on a dedicated thread and re-throws any failure. */
        public static void runTest(EncodeVirtualDisplayTest obj) throws Throwable {
            EncodeVirtualWrapper wrapper = new EncodeVirtualWrapper(obj);
            Thread th = new Thread(wrapper, "codec test");
            th.start();
            th.join();
            if (wrapper.mThrowable != null) {
                throw wrapper.mThrowable;
            }
        }
    }

    /**
     * Returns true if an encoder for the specified MIME type is present on this device.
     */
    private static boolean hasCodec(String mimeType) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);

            if (!codecInfo.isEncoder()) {
                continue;
            }

            String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Returns true if the encoder level, specified in the ENCODER_PARAM_TABLE, can be supported.
     */
    private static boolean verifySupportForEncoderLevel(int index) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);

            if (!codecInfo.isEncoder()) {
                continue;
            }

            String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (!types[j].equalsIgnoreCase(MIME_TYPE)) {
                    continue;
                }

                MediaCodecInfo.CodecCapabilities caps = codecInfo.getCapabilitiesForType(types[j]);
                for (int k = 0; k < caps.profileLevels.length; k++) {
                    // AVC level constants increase with capability, so >= works here.
                    if (caps.profileLevels[k].level >= ENCODER_PARAM_TABLE[index][0]) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /**
     * Initialize the encoder parameters according to the device capability.
     */
    private static void setupEncoderParameters() {
        // Loop over each table entry until a proper encoder setting is found.
        for (int i = 0; i < ENCODER_PARAM_TABLE.length; i++) {
            // Check whether this entry's level can be supported.
            if (verifySupportForEncoderLevel(i)) {
                sWidth = ENCODER_PARAM_TABLE[i][1];
                sHeight = ENCODER_PARAM_TABLE[i][2];
                sBitRate = ENCODER_PARAM_TABLE[i][3];
                sFrameRate = ENCODER_PARAM_TABLE[i][4];

                Log.d(TAG, "encoder parameters changed: width = " + sWidth + ", height = " + sHeight
                        + ", bitrate = " + sBitRate + ", framerate = " + sFrameRate);
                break;
            }
        }
    }

    /**
     * Prepares the encoder, decoder, and virtual display, then runs the test body and
     * releases everything when done.
     */
    private void encodeVirtualDisplayTest() throws IOException {
        MediaCodec encoder = null;
        MediaCodec decoder = null;
        OutputSurface outputSurface = null;
        VirtualDisplay virtualDisplay = null;

        // Don't run the test if the codec isn't present.
        if (!hasCodec(MIME_TYPE)) {
            return;
        }

        try {
            // Encoded video resolution matches virtual display.
            MediaFormat encoderFormat = MediaFormat.createVideoFormat(MIME_TYPE, sWidth, sHeight);
            encoderFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            encoderFormat.setInteger(MediaFormat.KEY_BIT_RATE, sBitRate);
            encoderFormat.setInteger(MediaFormat.KEY_FRAME_RATE, sFrameRate);
            encoderFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);

            encoder = MediaCodec.createEncoderByType(MIME_TYPE);
            encoder.configure(encoderFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            Surface inputSurface = encoder.createInputSurface();
            encoder.start();

            // Create a virtual display that will output to our encoder.
            virtualDisplay = mDisplayManager.createVirtualDisplay(NAME,
                    sWidth, sHeight, DENSITY, inputSurface, 0);

            // We also need a decoder to check the output of the encoder.
            decoder = MediaCodec.createDecoderByType(MIME_TYPE);
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(MIME_TYPE, sWidth, sHeight);
            outputSurface = new OutputSurface(sWidth, sHeight);
            decoder.configure(decoderFormat, outputSurface.getSurface(), null, 0);
            decoder.start();

            // Run the color slide show on a separate thread.
            mInputDone = false;
            new ColorSlideShow(virtualDisplay.getDisplay()).start();

            // Record everything we can and check the results.
            doTestEncodeVirtual(encoder, decoder, outputSurface);

        } finally {
            if (VERBOSE) Log.d(TAG, "releasing codecs, surfaces, and virtual display");
            if (virtualDisplay != null) {
                virtualDisplay.release();
            }
            if (outputSurface != null) {
                outputSurface.release();
            }
            if (encoder != null) {
                encoder.stop();
                encoder.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }
        }
    }

    /**
     * Drives the encoder and decoder.  Drains encoded output into the decoder, renders
     * decoded frames to the output surface, and verifies that every TEST_COLORS entry is
     * observed in order (black frames and repeats are tolerated).
     */
    private void doTestEncodeVirtual(MediaCodec encoder, MediaCodec decoder,
            OutputSurface outputSurface) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean inputEosSignaled = false;
        int lastIndex = -1;
        int goodFrames = 0;
        int debugFrameCount = 0;

        // Save a copy to disk.  Useful for debugging the test.  Note this is a raw elementary
        // stream, not a .mp4 file, so not all players will know what to do with it.
        FileOutputStream outputStream = null;
        if (DEBUG_SAVE_FILE) {
            // Use ".h264" since this is a raw AVC elementary stream, not an MP4 container.
            String fileName = DEBUG_FILE_NAME_BASE + sWidth + "x" + sHeight + ".h264";
            try {
                outputStream = new FileOutputStream(fileName);
                Log.d(TAG, "encoded output will be saved as " + fileName);
            } catch (IOException ioe) {
                Log.w(TAG, "Unable to create debug output file " + fileName);
                throw new RuntimeException(ioe);
            }
        }

        // Loop until the output side is done.
        boolean encoderDone = false;
        boolean outputDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "loop");

            if (!inputEosSignaled && mInputDone) {
                if (VERBOSE) Log.d(TAG, "signaling input EOS");
                encoder.signalEndOfInputStream();
                inputEosSignaled = true;
            }

            boolean decoderOutputAvailable = true;
            boolean encoderOutputAvailable = !encoderDone;
            while (decoderOutputAvailable || encoderOutputAvailable) {
                // Start by draining any pending output from the decoder.  It's important to
                // do this before we try to stuff any more data in.
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                    decoderOutputAvailable = false;
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed (but we don't care)");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // this happens before the first frame is returned
                    MediaFormat decoderOutputFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " +
                            decoderOutputFormat);
                } else if (decoderStatus < 0) {
                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else { // decoderStatus >= 0
                    if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                            " (size=" + info.size + ")");
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        if (VERBOSE) Log.d(TAG, "output EOS");
                        outputDone = true;
                    }

                    // The ByteBuffers are null references, but we still get a nonzero size for
                    // the decoded data.
                    boolean doRender = (info.size != 0);

                    // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                    // that the texture will be available before the call returns, so we
                    // need to wait for the onFrameAvailable callback to fire.  If we don't
                    // wait, we risk dropping frames.
                    outputSurface.makeCurrent();
                    decoder.releaseOutputBuffer(decoderStatus, doRender);
                    if (doRender) {
                        if (VERBOSE) Log.d(TAG, "awaiting frame " + (lastIndex + 1));
                        outputSurface.awaitNewImage();
                        outputSurface.drawImage();
                        int foundIndex = checkSurfaceFrame();
                        if (foundIndex == lastIndex + 1) {
                            // found the next one in the series
                            lastIndex = foundIndex;
                            goodFrames++;
                        } else if (foundIndex == lastIndex) {
                            // Sometimes we see the same color two frames in a row.
                            if (VERBOSE) Log.d(TAG, "Got another " + lastIndex);
                        } else if (foundIndex > 0) {
                            // Looks like we missed a color frame.  It's possible something
                            // stalled and we dropped a frame.  Skip forward to see if we
                            // can catch the rest.
                            if (foundIndex < lastIndex) {
                                Log.w(TAG, "Ignoring backward skip from " +
                                        lastIndex + " to " + foundIndex);
                            } else {
                                Log.w(TAG, "Frame skipped, advancing lastIndex from " +
                                        lastIndex + " to " + foundIndex);
                                goodFrames++;
                                lastIndex = foundIndex;
                            }
                        }
                    }
                }
                if (decoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // Continue attempts to drain output.
                    continue;
                }

                // Decoder is drained, check to see if we've got a new buffer of output from
                // the encoder.
                if (!encoderDone) {
                    int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                        // no output available yet
                        if (VERBOSE) Log.d(TAG, "no output from encoder available");
                        encoderOutputAvailable = false;
                    } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        // not expected for an encoder
                        encoderOutputBuffers = encoder.getOutputBuffers();
                        if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                    } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        // received before first buffer
                        MediaFormat newFormat = encoder.getOutputFormat();
                        if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                    } else if (encoderStatus < 0) {
                        fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                    } else { // encoderStatus >= 0
                        ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                        if (encodedData == null) {
                            fail("encoderOutputBuffer " + encoderStatus + " was null");
                        }

                        // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
                        encodedData.position(info.offset);
                        encodedData.limit(info.offset + info.size);

                        if (outputStream != null) {
                            byte[] data = new byte[info.size];
                            encodedData.get(data);
                            encodedData.position(info.offset);
                            try {
                                outputStream.write(data);
                            } catch (IOException ioe) {
                                Log.w(TAG, "failed writing debug data to file");
                                throw new RuntimeException(ioe);
                            }
                            debugFrameCount++;
                        }

                        // Get a decoder input buffer, blocking until it's available.  We just
                        // drained the decoder output, so we expect there to be a free input
                        // buffer now or in the near future (i.e. this should never deadlock
                        // if the codec is meeting requirements).
                        //
                        // The first buffer of data we get will have the BUFFER_FLAG_CODEC_CONFIG
                        // flag set; the decoder will see this and finish configuring itself.
                        int inputBufIndex = decoder.dequeueInputBuffer(-1);
                        ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                        inputBuf.clear();
                        inputBuf.put(encodedData);
                        decoder.queueInputBuffer(inputBufIndex, 0, info.size,
                                info.presentationTimeUs, info.flags);

                        // If everything from the encoder has been passed to the decoder, we
                        // can stop polling the encoder output.  (This is just an optimization.)
                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            encoderDone = true;
                            encoderOutputAvailable = false;
                        }
                        if (VERBOSE) Log.d(TAG, "passed " + info.size + " bytes to decoder"
                                + (encoderDone ? " (EOS)" : ""));

                        encoder.releaseOutputBuffer(encoderStatus, false);
                    }
                }
            }
        }

        if (outputStream != null) {
            try {
                outputStream.close();
                if (VERBOSE) Log.d(TAG, "Wrote " + debugFrameCount + " frames");
            } catch (IOException ioe) {
                Log.w(TAG, "failed closing debug file");
                throw new RuntimeException(ioe);
            }
        }

        if (goodFrames != TEST_COLORS.length) {
            fail("Found " + goodFrames + " of " + TEST_COLORS.length + " expected frames");
        }
    }

    /**
     * Checks the contents of the current EGL surface to see if it matches expectations.
     * <p>
     * The surface may be black or one of the colors we've drawn.  We have sufficiently little
     * control over the rendering process that we don't know how many (if any) black frames
     * will appear between each color frame.
     * <p>
     * @return the color index, or -2 for black
     * @throws RuntimeException if the color isn't recognized (probably because the RGB<->YUV
     *     conversion introduced too much variance)
     */
    private int checkSurfaceFrame() {
        // Read a pixel from the center of the surface.  Might want to read from multiple points
        // and average them together.
        int x = sWidth / 2;
        int y = sHeight / 2;
        GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
        int r = mPixelBuf.get(0) & 0xff;
        int g = mPixelBuf.get(1) & 0xff;
        int b = mPixelBuf.get(2) & 0xff;
        if (VERBOSE) Log.d(TAG, "GOT: r=" + r + " g=" + g + " b=" + b);

        if (approxEquals(0, r) && approxEquals(0, g) && approxEquals(0, b)) {
            return -2;
        }

        // Walk through the color list and try to find a match.  These may have gone through
        // RGB<->YCbCr conversions, so don't expect exact matches.
        for (int i = 0; i < TEST_COLORS.length; i++) {
            int testRed = (TEST_COLORS[i] >> 16) & 0xff;
            int testGreen = (TEST_COLORS[i] >> 8) & 0xff;
            int testBlue = TEST_COLORS[i] & 0xff;
            if (approxEquals(testRed, r) && approxEquals(testGreen, g) &&
                    approxEquals(testBlue, b)) {
                if (VERBOSE) Log.d(TAG, "Matched color " + i + ": r=" + r + " g=" + g + " b=" + b);
                return i;
            }
        }

        throw new RuntimeException("No match for color r=" + r + " g=" + g + " b=" + b);
    }

    /**
     * Determines if two color values are approximately equal.
     */
    private static boolean approxEquals(int expected, int actual) {
        final int MAX_DELTA = 4;
        return Math.abs(expected - actual) <= MAX_DELTA;
    }

    /**
     * Creates a series of colorful Presentations on the specified Display.
     */
    private class ColorSlideShow extends Thread {
        private Display mDisplay;

        public ColorSlideShow(Display display) {
            mDisplay = display;
        }

        @Override
        public void run() {
            for (int i = 0; i < TEST_COLORS.length; i++) {
                showPresentation(TEST_COLORS[i]);
            }

            if (VERBOSE) Log.d(TAG, "slide show finished");
            mInputDone = true;
        }

        private void showPresentation(final int color) {
            final TestPresentation[] presentation = new TestPresentation[1];
            try {
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        // Want to create presentation on UI thread so it finds the right Looper
                        // when setting up the Dialog.
                        presentation[0] = new TestPresentation(getContext(), mDisplay, color);
                        if (VERBOSE) Log.d(TAG, "showing color=0x" + Integer.toHexString(color));
                        presentation[0].show();
                    }
                });

                // Give the presentation an opportunity to render.  We don't have a way to
                // monitor the output, so we just sleep for a bit.
                try { Thread.sleep(UI_RENDER_PAUSE_MS); }
                catch (InterruptedException ignore) {}
            } finally {
                if (presentation[0] != null) {
                    runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            presentation[0].dismiss();
                        }
                    });
                }
            }
        }
    }

    /**
     * Executes a runnable on the UI thread, and waits for it to complete.
     */
    private void runOnUiThread(Runnable runnable) {
        Runnable waiter = new Runnable() {
            @Override
            public void run() {
                synchronized (this) {
                    notifyAll();
                }
            }
        };
        synchronized (waiter) {
            mUiHandler.post(runnable);
            mUiHandler.post(waiter);
            try {
                waiter.wait(UI_TIMEOUT_MS);
            } catch (InterruptedException ex) {
            }
        }
    }

    /**
     * Presentation we can show on a virtual display.  The view is set to a single color value.
     */
    private class TestPresentation extends Presentation {
        private final int mColor;

        public TestPresentation(Context context, Display display, int color) {
            super(context, display);
            mColor = color;
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);

            setTitle("Encode Virtual Test");
            getWindow().setType(WindowManager.LayoutParams.TYPE_PRIVATE_PRESENTATION);

            // Create a solid color image to use as the content of the presentation.
            ImageView view = new ImageView(getContext());
            view.setImageDrawable(new ColorDrawable(mColor));
            view.setLayoutParams(new LayoutParams(
                    LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
            setContentView(view);
        }
    }
}