/*
 * Copyright (C) 2007 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.test;

import com.android.internal.util.Predicate;
import com.android.internal.util.Predicates;

import android.app.Activity;
import android.app.Instrumentation;
import android.os.Bundle;
import android.os.Debug;
import android.os.Looper;
import android.os.Parcelable;
import android.os.PerformanceCollector;
import android.os.PerformanceCollector.PerformanceResultsWriter;
import android.test.suitebuilder.TestMethod;
import android.test.suitebuilder.TestPredicates;
import android.test.suitebuilder.TestSuiteBuilder;
import android.test.suitebuilder.annotation.HasAnnotation;
import android.test.suitebuilder.annotation.LargeTest;
import android.util.Log;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

import junit.framework.AssertionFailedError;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestListener;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import junit.runner.BaseTestRunner;
import junit.textui.ResultPrinter;

/**
 * An {@link Instrumentation} that runs various types of {@link junit.framework.TestCase}s against
 * an Android package (application).
 *
 * <div class="special reference">
 * <h3>Developer Guides</h3>
 * <p>For more information about application testing, read the
 * <a href="{@docRoot}guide/topics/testing/index.html">Testing</a> developer guide.</p>
 * </div>
 *
 * <h3>Typical Usage</h3>
 * <ol>
 * <li>Write {@link junit.framework.TestCase}s that perform unit, functional, or performance tests
 * against the classes in your package. Typically these are subclassed from:
 * <ul><li>{@link android.test.ActivityInstrumentationTestCase2}</li>
 * <li>{@link android.test.ActivityUnitTestCase}</li>
 * <li>{@link android.test.AndroidTestCase}</li>
 * <li>{@link android.test.ApplicationTestCase}</li>
 * <li>{@link android.test.InstrumentationTestCase}</li>
 * <li>{@link android.test.ProviderTestCase}</li>
 * <li>{@link android.test.ServiceTestCase}</li>
 * <li>{@link android.test.SingleLaunchActivityTestCase}</li></ul>
 * <li>Set the <code>android:targetPackage</code> attribute of the
 * <code>&lt;instrumentation&gt;</code> element in the test package's manifest. You should set the
 * attribute value to the package name of the target application under test.
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with no optional arguments, to run all tests (except performance tests).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e func true' to run all functional tests. These are tests that derive from
 * {@link android.test.InstrumentationTestCase}.
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e unit true' to run all unit tests. These are tests that <i>do not</i>
 * derive from {@link android.test.InstrumentationTestCase} (and are not performance tests).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e class' set to run an individual {@link junit.framework.TestCase}.
 * </ol>
 * <p/>
 * <b>Running all tests:</b> adb shell am instrument -w
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all small tests:</b> adb shell am instrument -w
 * -e size small
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all medium tests:</b> adb shell am instrument -w
 * -e size medium
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all large tests:</b> adb shell am instrument -w
 * -e size large
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Filter test run to tests with given annotation:</b> adb shell am instrument -w
 * -e annotation com.android.foo.MyAnnotation
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * If used with other options, the resulting test run will contain only tests that satisfy both
 * options; e.g. "-e size large -e annotation com.android.foo.MyAnnotation" will run only tests with
 * both the {@link LargeTest} and "com.android.foo.MyAnnotation" annotations.
 * <p/>
 * <b>Filter test run to tests <i>without</i> given annotation:</b> adb shell am instrument -w
 * -e notAnnotation com.android.foo.MyAnnotation
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running a single testcase:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running a single test:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest#testFoo
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running multiple tests:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest,com.android.foo.TooTest
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all tests in a java package:</b> adb shell am instrument -w
 * -e package com.android.foo.subpkg
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Including performance tests:</b> adb shell am instrument -w
 * -e perf true
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>To debug your tests, set a break point in your code and pass:</b>
 * -e debug true
 * <p/>
 * <b>To run in 'log only' mode:</b>
 * -e log true
 * This option will load and iterate through all test classes and methods, but will bypass actual
 * test execution. Useful for quickly obtaining info on the tests to be executed by an
 * instrumentation command.
 * <p/>
 * <b>To generate EMMA code coverage:</b>
 * -e coverage true
 * Note: this requires an emma instrumented build.
 * By default, the code coverage results file
 * will be saved in a /data/&lt;app&gt;/coverage.ec file, unless overridden by the coverageFile flag
 * (see below).
 * <p/>
 * <b>To specify EMMA code coverage results file path:</b>
 * -e coverageFile /sdcard/myFile.ec
 * <br/>
 * in addition to the other arguments.
 * @deprecated Use
 * <a href="{@docRoot}reference/android/support/test/runner/AndroidJUnitRunner.html">
 * AndroidJUnitRunner</a> instead. New tests should be written using the
 * <a href="{@docRoot}tools/testing-support-library/index.html">Android Testing Support Library</a>.
 */

/* (not JavaDoc)
 * Although not necessary in most cases, another way to use this class is to extend it and have the
 * derived class return the desired test suite from the {@link #getTestSuite()} method. The test
 * suite returned from this method will be used if no target class is defined in the meta-data or
 * command line argument parameters. If a derived class is used it needs to be added as an
 * instrumentation to the AndroidManifest.xml and the command to run it would look like:
 * <p/>
 * adb shell am instrument -w com.android.foo/<i>com.android.FooInstrumentationTestRunner</i>
 * <p/>
 * Where <i>com.android.FooInstrumentationTestRunner</i> is the derived class. An illustrative
 * sketch of such a derived runner appears as a comment near getAllTests() below.
 *
 * This model is used by many existing app tests, but can probably be deprecated.
 */
@Deprecated
public class InstrumentationTestRunner extends Instrumentation implements TestSuiteProvider {

    /** @hide */
    public static final String ARGUMENT_TEST_CLASS = "class";
    /** @hide */
    public static final String ARGUMENT_TEST_PACKAGE = "package";
    /** @hide */
    public static final String ARGUMENT_TEST_SIZE_PREDICATE = "size";
    /** @hide */
    public static final String ARGUMENT_DELAY_MSEC = "delay_msec";

    private static final String SMALL_SUITE = "small";
    private static final String MEDIUM_SUITE = "medium";
    private static final String LARGE_SUITE = "large";

    private static final String ARGUMENT_LOG_ONLY = "log";
    /** @hide */
    static final String ARGUMENT_ANNOTATION = "annotation";
    /** @hide */
    static final String ARGUMENT_NOT_ANNOTATION = "notAnnotation";

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the "small"
     * suite. It is used to make an educated guess at which suite an unlabeled test belongs to.
     */
    private static final float SMALL_SUITE_MAX_RUNTIME = 100;

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the
     * "medium" suite. It is used to make an educated guess at which suite an unlabeled test
     * belongs to.
     */
    private static final float MEDIUM_SUITE_MAX_RUNTIME = 1000;

    /**
     * The following keys are used in the status bundle to provide structured reports to
     * an IInstrumentationWatcher.
     */

    /**
     * This value, if stored with key {@link android.app.Instrumentation#REPORT_KEY_IDENTIFIER},
     * identifies InstrumentationTestRunner as the source of the report. This is sent with all
     * status messages.
     */
    public static final String REPORT_VALUE_ID = "InstrumentationTestRunner";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the total number of tests that are being run. This is sent with all status
     * messages.
     */
    public static final String REPORT_KEY_NUM_TOTAL = "numtests";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the sequence number of the current test. This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NUM_CURRENT = "current";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test class. This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_CLASS = "class";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test. This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_TEST = "test";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the run time in milliseconds of the current test.
     */
    private static final String REPORT_KEY_RUN_TIME = "runtime";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the total number of iterations of the current test.
     */
    private static final String REPORT_KEY_NUM_ITERATIONS = "numiterations";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the guessed suite assignment for the current test.
     */
    private static final String REPORT_KEY_SUITE_ASSIGNMENT = "suiteassignment";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the path to the generated code coverage file.
     */
    private static final String REPORT_KEY_COVERAGE_PATH = "coverageFilePath";

    /**
     * The test is starting.
     */
    public static final int REPORT_VALUE_RESULT_START = 1;
    /**
     * The test completed successfully.
     */
    public static final int REPORT_VALUE_RESULT_OK = 0;
    /**
     * The test completed with an error.
     */
    public static final int REPORT_VALUE_RESULT_ERROR = -1;
    /**
     * The test completed with a failure.
     */
    public static final int REPORT_VALUE_RESULT_FAILURE = -2;
    /**
     * If included in the status bundle sent to an IInstrumentationWatcher, this key
     * identifies a stack trace describing an error or failure. This is sent with any status
     * message describing a specific test being completed.
     */
    public static final String REPORT_KEY_STACK = "stack";

    // Default file name for code coverage
    private static final String DEFAULT_COVERAGE_FILE_NAME = "coverage.ec";

    private static final String LOG_TAG = "InstrumentationTestRunner";

    private final Bundle mResults = new Bundle();
    private Bundle mArguments;
    private AndroidTestRunner mTestRunner;
    private boolean mDebug;
    private boolean mJustCount;
    private boolean mSuiteAssignmentMode;
    private int mTestCount;
    private String mPackageOfTests;
    private boolean mCoverage;
    private String mCoverageFilePath;
    private int mDelayMsec;

    @Override
    public void onCreate(Bundle arguments) {
        super.onCreate(arguments);
        mArguments = arguments;

        // Apk paths used to search for test classes when using TestSuiteBuilders.
        String[] apkPaths =
                {getTargetContext().getPackageCodePath(), getContext().getPackageCodePath()};
        ClassPathPackageInfoSource.setApkPaths(apkPaths);

        Predicate<TestMethod> testSizePredicate = null;
        Predicate<TestMethod> testAnnotationPredicate = null;
        Predicate<TestMethod> testNotAnnotationPredicate = null;
        String testClassesArg = null;
        boolean logOnly = false;

        if (arguments != null) {
            // Test class name passed as an argument should override any meta-data declaration.
            testClassesArg = arguments.getString(ARGUMENT_TEST_CLASS);
            mDebug = getBooleanArgument(arguments, "debug");
            mJustCount = getBooleanArgument(arguments, "count");
            mSuiteAssignmentMode = getBooleanArgument(arguments, "suiteAssignment");
            mPackageOfTests = arguments.getString(ARGUMENT_TEST_PACKAGE);
            testSizePredicate = getSizePredicateFromArg(
                    arguments.getString(ARGUMENT_TEST_SIZE_PREDICATE));
            testAnnotationPredicate = getAnnotationPredicate(
                    arguments.getString(ARGUMENT_ANNOTATION));
            testNotAnnotationPredicate = getNotAnnotationPredicate(
                    arguments.getString(ARGUMENT_NOT_ANNOTATION));

            logOnly = getBooleanArgument(arguments, ARGUMENT_LOG_ONLY);
            mCoverage = getBooleanArgument(arguments, "coverage");
            mCoverageFilePath = arguments.getString("coverageFile");

            try {
                Object delay = arguments.get(ARGUMENT_DELAY_MSEC);  // Accept either string or int
                if (delay != null) mDelayMsec = Integer.parseInt(delay.toString());
            } catch (NumberFormatException e) {
                Log.e(LOG_TAG, "Invalid delay_msec parameter", e);
            }
        }

        TestSuiteBuilder testSuiteBuilder = new TestSuiteBuilder(getClass().getName(),
                getTargetContext().getClassLoader());

        if (testSizePredicate != null) {
            testSuiteBuilder.addRequirements(testSizePredicate);
        }
        if (testAnnotationPredicate != null) {
            testSuiteBuilder.addRequirements(testAnnotationPredicate);
        }
        if (testNotAnnotationPredicate != null) {
            testSuiteBuilder.addRequirements(testNotAnnotationPredicate);
        }

        if (testClassesArg == null) {
            if (mPackageOfTests != null) {
                testSuiteBuilder.includePackages(mPackageOfTests);
            } else {
                TestSuite testSuite = getTestSuite();
                if (testSuite != null) {
                    testSuiteBuilder.addTestSuite(testSuite);
                } else {
                    // No package or class bundle arguments were supplied, and no test suite was
                    // provided, so add all tests in the application.
                    testSuiteBuilder.includePackages("");
                }
            }
        } else {
            parseTestClasses(testClassesArg, testSuiteBuilder);
        }

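        // Subclass hook: getBuilderRequirements() below returns an empty list in this base class,
        // so by default the next call adds no requirements beyond the predicates added above.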
        testSuiteBuilder.addRequirements(getBuilderRequirements());

        mTestRunner = getAndroidTestRunner();
        mTestRunner.setContext(getTargetContext());
        mTestRunner.setInstrumentation(this);
        mTestRunner.setSkipExecution(logOnly);
        mTestRunner.setTest(testSuiteBuilder.build());
        mTestCount = mTestRunner.getTestCases().size();
        if (mSuiteAssignmentMode) {
            mTestRunner.addTestListener(new SuiteAssignmentPrinter());
        } else {
            WatcherResultPrinter resultPrinter = new WatcherResultPrinter(mTestCount);
            mTestRunner.addTestListener(new TestPrinter("TestRunner", false));
            mTestRunner.addTestListener(resultPrinter);
            mTestRunner.setPerformanceResultsWriter(resultPrinter);
        }
        start();
    }

    /**
     * Get the arguments passed to this instrumentation.
     *
     * @return the Bundle object
     */
    public Bundle getArguments() {
        return mArguments;
    }

    /**
     * Add a {@link TestListener}
     * @hide
     */
    protected void addTestListener(TestListener listener) {
        if (mTestRunner != null && listener != null) {
            mTestRunner.addTestListener(listener);
        }
    }

    List<Predicate<TestMethod>> getBuilderRequirements() {
        return new ArrayList<Predicate<TestMethod>>();
    }

    /**
     * Parses and loads the specified set of test classes.
     *
     * @param testClassArg - comma-separated list of test classes and methods
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClasses(String testClassArg, TestSuiteBuilder testSuiteBuilder) {
        String[] testClasses = testClassArg.split(",");
        for (String testClass : testClasses) {
            parseTestClass(testClass, testSuiteBuilder);
        }
    }

    /**
     * Parse and load the given test class and, optionally, method.
     *
     * @param testClassName - full package name of test class and optionally method to add.
     *        Expected format: com.android.TestClass#testMethod
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClass(String testClassName, TestSuiteBuilder testSuiteBuilder) {
        int methodSeparatorIndex = testClassName.indexOf('#');
        String testMethodName = null;

        if (methodSeparatorIndex > 0) {
            testMethodName = testClassName.substring(methodSeparatorIndex + 1);
            testClassName = testClassName.substring(0, methodSeparatorIndex);
        }
        testSuiteBuilder.addTestClassByName(testClassName, testMethodName, getTargetContext());
    }

    protected AndroidTestRunner getAndroidTestRunner() {
        return new AndroidTestRunner();
    }

    private boolean getBooleanArgument(Bundle arguments, String tag) {
        String tagString = arguments.getString(tag);
        return tagString != null && Boolean.parseBoolean(tagString);
    }

    /*
     * Returns the size predicate object, corresponding to the "size" argument value.
     */
    private Predicate<TestMethod> getSizePredicateFromArg(String sizeArg) {

        if (SMALL_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_SMALL;
        } else if (MEDIUM_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_MEDIUM;
        } else if (LARGE_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_LARGE;
        } else {
            return null;
        }
    }

    /**
     * Returns the test predicate object, corresponding to the annotation class value provided via
     * the {@link #ARGUMENT_ANNOTATION} argument.
     *
     * @return the predicate or <code>null</code>
     */
    private Predicate<TestMethod> getAnnotationPredicate(String annotationClassName) {
        Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName);
        if (annotationClass != null) {
            return new HasAnnotation(annotationClass);
        }
        return null;
    }

    /**
     * Returns the negative test predicate object, corresponding to the annotation class value
     * provided via the {@link #ARGUMENT_NOT_ANNOTATION} argument.
     *
     * @return the predicate or <code>null</code>
     */
    private Predicate<TestMethod> getNotAnnotationPredicate(String annotationClassName) {
        Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName);
        if (annotationClass != null) {
            return Predicates.not(new HasAnnotation(annotationClass));
        }
        return null;
    }

    /**
     * Helper method to return the annotation class with the specified name.
     *
     * @param annotationClassName the fully qualified name of the class
     * @return the annotation class or <code>null</code>
     */
    private Class<? extends Annotation> getAnnotationClass(String annotationClassName) {
        if (annotationClassName == null) {
            return null;
        }
        try {
            Class<?> annotationClass = Class.forName(annotationClassName);
            if (annotationClass.isAnnotation()) {
                return (Class<? extends Annotation>) annotationClass;
            } else {
                Log.e(LOG_TAG, String.format("Provided annotation value %s is not an Annotation",
                        annotationClassName));
            }
        } catch (ClassNotFoundException e) {
            Log.e(LOG_TAG, String.format("Could not find class for specified annotation %s",
                    annotationClassName));
        }
        return null;
    }

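    /*
     * Illustrative note only: the predicates built above are combined in onCreate() via
     * TestSuiteBuilder.addRequirements(), so a command line such as
     *
     *     adb shell am instrument -w -e size large -e notAnnotation com.android.foo.MyAnnotation \
     *         com.android.foo/android.test.InstrumentationTestRunner
     *
     * yields a suite whose test methods must satisfy both TestPredicates.SELECT_LARGE and
     * Predicates.not(new HasAnnotation(MyAnnotation.class)). The package and annotation names are
     * placeholders taken from the class documentation above.
     */
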
    /**
     * Initialize the current thread as a looper.
     * <p/>
     * Exposed for unit testing.
     */
    void prepareLooper() {
        Looper.prepare();
    }

    @Override
    public void onStart() {
        prepareLooper();

        if (mJustCount) {
            mResults.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResults.putInt(REPORT_KEY_NUM_TOTAL, mTestCount);
            finish(Activity.RESULT_OK, mResults);
        } else {
            if (mDebug) {
                Debug.waitForDebugger();
            }

            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            PrintStream writer = new PrintStream(byteArrayOutputStream);
            try {
                StringResultPrinter resultPrinter = new StringResultPrinter(writer);

                mTestRunner.addTestListener(resultPrinter);

                long startTime = System.currentTimeMillis();
                mTestRunner.runTest();
                long runTime = System.currentTimeMillis() - startTime;

                resultPrinter.printResult(mTestRunner.getTestResult(), runTime);
            } catch (Throwable t) {
                // Catch all exceptions so a more verbose error message can be output.
                writer.println(String.format("Test run aborted due to unexpected exception: %s",
                        t.getMessage()));
                t.printStackTrace(writer);
            } finally {
                mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\nTest results for %s=%s",
                        mTestRunner.getTestClassName(),
                        byteArrayOutputStream.toString()));

                if (mCoverage) {
                    generateCoverageReport();
                }
                writer.close();

                finish(Activity.RESULT_OK, mResults);
            }
        }
    }

    public TestSuite getTestSuite() {
        return getAllTests();
    }

    /**
     * Override this to define all of the tests to run in your package.
     */
    public TestSuite getAllTests() {
        return null;
    }

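    /*
     * Illustrative sketch only (not part of this class): a derived runner that supplies its own
     * suite, as described in the "(not JavaDoc)" comment above. Class and test names are
     * placeholders.
     *
     *     public class FooInstrumentationTestRunner extends InstrumentationTestRunner {
     *         @Override
     *         public TestSuite getAllTests() {
     *             TestSuite suite = new TestSuite();
     *             suite.addTestSuite(FooTest.class);  // a hypothetical TestCase in the test package
     *             return suite;
     *         }
     *     }
     *
     * The derived class must also be declared as an <instrumentation> in the test package's
     * AndroidManifest.xml, and is then invoked with
     * "adb shell am instrument -w com.android.foo/com.android.FooInstrumentationTestRunner".
     */
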
    /**
     * Override this to provide access to the class loader of your package.
     */
    public ClassLoader getLoader() {
        return null;
    }

    private void generateCoverageReport() {
        // Use reflection to call the emma dump coverage method, to avoid
        // always statically compiling against the emma jar.
        String coverageFilePath = getCoverageFilePath();
        java.io.File coverageFile = new java.io.File(coverageFilePath);
        try {
            Class<?> emmaRTClass = Class.forName("com.vladium.emma.rt.RT");
            Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData",
                    coverageFile.getClass(), boolean.class, boolean.class);

            dumpCoverageMethod.invoke(null, coverageFile, false, false);
            // Output the path to the generated coverage file so it can be parsed by a test harness
            // if needed.
            mResults.putString(REPORT_KEY_COVERAGE_PATH, coverageFilePath);
            // Also output a more user-friendly msg.
            final String currentStream = mResults.getString(
                    Instrumentation.REPORT_KEY_STREAMRESULT);
            mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    String.format("%s\nGenerated code coverage data to %s", currentStream,
                    coverageFilePath));
        } catch (ClassNotFoundException e) {
            reportEmmaError("Is emma jar on classpath?", e);
        } catch (SecurityException e) {
            reportEmmaError(e);
        } catch (NoSuchMethodException e) {
            reportEmmaError(e);
        } catch (IllegalArgumentException e) {
            reportEmmaError(e);
        } catch (IllegalAccessException e) {
            reportEmmaError(e);
        } catch (InvocationTargetException e) {
            reportEmmaError(e);
        }
    }

    private String getCoverageFilePath() {
        if (mCoverageFilePath == null) {
            return getTargetContext().getFilesDir().getAbsolutePath() + File.separator +
                    DEFAULT_COVERAGE_FILE_NAME;
        } else {
            return mCoverageFilePath;
        }
    }

    private void reportEmmaError(Exception e) {
        reportEmmaError("", e);
    }

    private void reportEmmaError(String hint, Exception e) {
        String msg = "Failed to generate emma coverage. " + hint;
        Log.e(LOG_TAG, msg, e);
        mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "\nError: " + msg);
    }

    // TODO kill this, use status() and prettyprint model for better output
    private class StringResultPrinter extends ResultPrinter {

        public StringResultPrinter(PrintStream writer) {
            super(writer);
        }

        public synchronized void printResult(TestResult result, long runTime) {
            printHeader(runTime);
            printFooter(result);
        }
    }

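    /*
     * Worked example for the SuiteAssignmentPrinter below, based on the thresholds defined at the
     * top of this class: a passing test that is not an InstrumentationTestCase and runs in 150 ms
     * exceeds SMALL_SUITE_MAX_RUNTIME (100 ms) but stays under MEDIUM_SUITE_MAX_RUNTIME (1000 ms),
     * so it is reported with a "medium" suite assignment; anything at or above 1000 ms is reported
     * as "large".
     */
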
    /**
     * This class sends status reports back to the IInstrumentationWatcher about
     * which suite each test belongs to.
     */
    private class SuiteAssignmentPrinter implements TestListener {

        private Bundle mTestResult;
        private long mStartTime;
        private long mEndTime;
        private boolean mTimingValid;

        public SuiteAssignmentPrinter() {
        }

        /**
         * Send a status for the start of each test, so long tests can be seen as "running".
         */
        public void startTest(Test test) {
            mTimingValid = true;
            mStartTime = System.currentTimeMillis();
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            float runTime;
            String assignmentSuite;
            mEndTime = System.currentTimeMillis();
            mTestResult = new Bundle();

            if (!mTimingValid || mStartTime < 0) {
                assignmentSuite = "NA";
                runTime = -1;
            } else {
                runTime = mEndTime - mStartTime;
                if (runTime < SMALL_SUITE_MAX_RUNTIME
                        && !InstrumentationTestCase.class.isAssignableFrom(test.getClass())) {
                    assignmentSuite = SMALL_SUITE;
                } else if (runTime < MEDIUM_SUITE_MAX_RUNTIME) {
                    assignmentSuite = MEDIUM_SUITE;
                } else {
                    assignmentSuite = LARGE_SUITE;
                }
            }
            // Clear mStartTime so that we can verify that it gets set next time.
            mStartTime = -1;

            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    test.getClass().getName() + "#" + ((TestCase) test).getName()
                    + "\nin " + assignmentSuite + " suite\nrunTime: "
                    + String.valueOf(runTime) + "\n");
            mTestResult.putFloat(REPORT_KEY_RUN_TIME, runTime);
            mTestResult.putString(REPORT_KEY_SUITE_ASSIGNMENT, assignmentSuite);

            sendStatus(0, mTestResult);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher.
     */
    private class WatcherResultPrinter implements TestListener, PerformanceResultsWriter {
        private final Bundle mResultTemplate;
        Bundle mTestResult;
        int mTestNum = 0;
        int mTestResultCode = 0;
        String mTestClass = null;
        PerformanceCollector mPerfCollector = new PerformanceCollector();
        boolean mIsTimedTest = false;
        boolean mIncludeDetailedStats = false;

        public WatcherResultPrinter(int numTests) {
            mResultTemplate = new Bundle();
            mResultTemplate.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResultTemplate.putInt(REPORT_KEY_NUM_TOTAL, numTests);
        }

        /**
         * Send a status for the start of each test, so long tests can be seen
         * as "running".
         */
        public void startTest(Test test) {
            String testClass = test.getClass().getName();
            String testName = ((TestCase) test).getName();
            mTestResult = new Bundle(mResultTemplate);
            mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass);
            mTestResult.putString(REPORT_KEY_NAME_TEST, testName);
            mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum);
            // Pretty printing
            if (testClass != null && !testClass.equals(mTestClass)) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\n%s:", testClass));
                mTestClass = testClass;
            } else {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "");
            }

            Method testMethod = null;
            try {
                testMethod = test.getClass().getMethod(testName);
                // Report the total number of iterations, if the test is repetitive.
                if (testMethod.isAnnotationPresent(RepetitiveTest.class)) {
                    int numIterations = testMethod.getAnnotation(
                            RepetitiveTest.class).numIterations();
                    mTestResult.putInt(REPORT_KEY_NUM_ITERATIONS, numIterations);
                }
            } catch (NoSuchMethodException e) {
                // Ignore - a test with the given name does not exist. This will be handled during
                // test execution.
            }

            // The delay_msec parameter is normally used to provide buffers of idle time
            // for power measurement purposes. To make sure there is a delay before and after
            // every test in a suite, we delay *after* every test (see endTest below) and also
            // delay *before* the first test. So, delay test1 delay test2 delay.

            try {
                if (mTestNum == 1) Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }

            sendStatus(REPORT_VALUE_RESULT_START, mTestResult);
            mTestResultCode = 0;

            mIsTimedTest = false;
            mIncludeDetailedStats = false;
            try {
                // Look for the TimedTest annotation on both the test class and the test method.
                if (testMethod != null && testMethod.isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = testMethod.getAnnotation(
                            TimedTest.class).includeDetailedStats();
                } else if (test.getClass().isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = test.getClass().getAnnotation(
                            TimedTest.class).includeDetailedStats();
                }
            } catch (SecurityException e) {
                // Ignore - a test with the given name cannot be accessed. This will be handled
                // during test execution.
            }

            if (mIsTimedTest && mIncludeDetailedStats) {
                mPerfCollector.beginSnapshot("");
            } else if (mIsTimedTest) {
                mPerfCollector.startTiming("");
            }
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_ERROR;
            // Pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    String.format("\nError in %s:\n%s",
                    ((TestCase) test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_FAILURE;
            // Pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    String.format("\nFailure in %s:\n%s",
                    ((TestCase) test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            if (mIsTimedTest && mIncludeDetailedStats) {
                mTestResult.putAll(mPerfCollector.endSnapshot());
            } else if (mIsTimedTest) {
                writeStopTiming(mPerfCollector.stopTiming(""));
            }

            if (mTestResultCode == 0) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ".");
            }
            sendStatus(mTestResultCode, mTestResult);

            try {  // Sleep after every test, if specified
                Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }
        }

        public void writeBeginSnapshot(String label) {
            // Do nothing
        }

        public void writeEndSnapshot(Bundle results) {
            // Copy all snapshot data fields into mResults, which is output
            // via Instrumentation.finish
            mResults.putAll(results);
        }

        public void writeStartTiming(String label) {
            // Do nothing
        }

        public void writeStopTiming(Bundle results) {
            // Copy results into mTestResult by flattening the list of iterations, which is
            // output via WatcherResultPrinter.endTest
            int i = 0;
            for (Parcelable p :
                    results.getParcelableArrayList(PerformanceCollector.METRIC_KEY_ITERATIONS)) {
                Bundle iteration = (Bundle) p;
                String index = "iteration" + i + ".";
                mTestResult.putString(index + PerformanceCollector.METRIC_KEY_LABEL,
                        iteration.getString(PerformanceCollector.METRIC_KEY_LABEL));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_CPU_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_CPU_TIME));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_EXECUTION_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_EXECUTION_TIME));
                i++;
            }
        }

        public void writeMeasurement(String label, long value) {
            mTestResult.putLong(label, value);
        }

        public void writeMeasurement(String label, float value) {
            mTestResult.putFloat(label, value);
        }

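        // Note on the shape of the data written by writeStopTiming() above: each entry in the
        // iterations list is flattened with an "iteration<N>." prefix, e.g.
        // "iteration0." + PerformanceCollector.METRIC_KEY_CPU_TIME, before landing in the
        // per-test result bundle sent in endTest().
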
        public void writeMeasurement(String label, String value) {
            mTestResult.putString(label, value);
        }

        // TODO report the end of the cycle
    }
}