/*
 * Copyright (C) 2007 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.test;

import static android.test.suitebuilder.TestPredicates.REJECT_PERFORMANCE;

import com.android.internal.util.Predicate;

import android.app.Activity;
import android.app.Instrumentation;
import android.os.Bundle;
import android.os.Debug;
import android.os.Looper;
import android.os.Parcelable;
import android.os.PerformanceCollector;
import android.os.PerformanceCollector.PerformanceResultsWriter;
import android.test.suitebuilder.TestMethod;
import android.test.suitebuilder.TestPredicates;
import android.test.suitebuilder.TestSuiteBuilder;
import android.util.Log;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

import junit.framework.AssertionFailedError;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestListener;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import junit.runner.BaseTestRunner;
import junit.textui.ResultPrinter;

/**
 * An {@link Instrumentation} that runs various types of {@link junit.framework.TestCase}s against
 * an Android package (application). Typical usage:
 * <ol>
 * <li>Write {@link junit.framework.TestCase}s that perform unit, functional, or performance tests
 * against the classes in your package.  Typically these are subclassed from:
 *   <ul><li>{@link android.test.ActivityInstrumentationTestCase2}</li>
 *   <li>{@link android.test.ActivityUnitTestCase}</li>
 *   <li>{@link android.test.AndroidTestCase}</li>
 *   <li>{@link android.test.ApplicationTestCase}</li>
 *   <li>{@link android.test.InstrumentationTestCase}</li>
 *   <li>{@link android.test.ProviderTestCase}</li>
 *   <li>{@link android.test.ServiceTestCase}</li>
 *   <li>{@link android.test.SingleLaunchActivityTestCase}</li></ul>
 *   <li>In an appropriate AndroidManifest.xml, define this instrumentation with
 * the appropriate android:targetPackage set (see the example after this list).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with no optional arguments, to run all tests (except performance tests).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e func true' to run all functional tests. These are tests that derive from
 * {@link android.test.InstrumentationTestCase}.
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e unit true' to run all unit tests. These are tests that <i>do not</i> derive
 * from {@link android.test.InstrumentationTestCase} (and are not performance tests).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e class' set to run an individual {@link junit.framework.TestCase}.
 * </ol>
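 * <p/>
 * For illustration, the manifest declaration mentioned above might look like the following
 * (package names here are placeholders):
 * <pre>
 * &lt;instrumentation android:name="android.test.InstrumentationTestRunner"
 *     android:targetPackage="com.android.foo"
 *     android:label="Tests for com.android.foo"/&gt;
 * </pre>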
 * <p/>
 * <b>Running all tests:</b> adb shell am instrument -w
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all small tests:</b> adb shell am instrument -w
 * -e size small
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all medium tests:</b> adb shell am instrument -w
 * -e size medium
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all large tests:</b> adb shell am instrument -w
 * -e size large
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running a single testcase:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running a single test:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest#testFoo
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running multiple tests:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest,com.android.foo.TooTest
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Including performance tests:</b> adb shell am instrument -w
 * -e perf true
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>To debug your tests, set a break point in your code and pass:</b>
 * -e debug true
 * <p/>
 * <b>To run in 'log only' mode:</b>
 * -e log true
 * This option will load and iterate through all test classes and methods, but will bypass actual
 * test execution. Useful for quickly obtaining info on the tests to be executed by an
 * instrumentation command.
 * <p/>
 * <b>To generate EMMA code coverage:</b>
 * -e coverage true
 * Note: this requires an emma instrumented build. By default, the code coverage results file
 * will be saved as coverage.ec under the target application's files directory
 * (e.g. /data/data/&lt;app&gt;/files/coverage.ec), unless overridden by the coverageFile flag (see
 * below).
 * <p/>
 * <b>To specify EMMA code coverage results file path:</b>
 * -e coverageFile /sdcard/myFile.ec
 * <br/>
 * in addition to the other arguments.
 */

/* (not JavaDoc)
 * Although not necessary in most cases, another way to use this class is to extend it and have the
 * derived class return the desired test suite from the {@link #getTestSuite()} method. The test
 * suite returned from this method will be used if no target class is defined in the meta-data or
 * command line argument parameters. If a derived class is used, it needs to be added as an
 * instrumentation to the AndroidManifest.xml and the command to run it would look like:
 * <p/>
 * adb shell am instrument -w com.android.foo/<i>com.android.FooInstrumentationTestRunner</i>
 * <p/>
 * Where <i>com.android.FooInstrumentationTestRunner</i> is the derived class.
 *
 * This model is used by many existing app tests, but can probably be deprecated.
 */
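
/* (not JavaDoc)
 * A minimal sketch of such a derived runner, with hypothetical class and test names:
 *
 * public class FooInstrumentationTestRunner extends InstrumentationTestRunner {
 *     @Override
 *     public TestSuite getTestSuite() {
 *         // FooTest stands in for a JUnit TestCase in the target package
 *         TestSuite suite = new TestSuite();
 *         suite.addTestSuite(FooTest.class);
 *         return suite;
 *     }
 * }
 */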
public class InstrumentationTestRunner extends Instrumentation implements TestSuiteProvider {

    /** @hide */
    public static final String ARGUMENT_TEST_CLASS = "class";
    /** @hide */
    public static final String ARGUMENT_TEST_PACKAGE = "package";
    /** @hide */
    public static final String ARGUMENT_TEST_SIZE_PREDICATE = "size";
    /** @hide */
    public static final String ARGUMENT_INCLUDE_PERF = "perf";
    /** @hide */
    public static final String ARGUMENT_DELAY_MSEC = "delay_msec";

    private static final String SMALL_SUITE = "small";
    private static final String MEDIUM_SUITE = "medium";
    private static final String LARGE_SUITE = "large";

    private static final String ARGUMENT_LOG_ONLY = "log";

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the "small"
     * suite. It is used to make an educated guess at which suite an unlabeled test belongs to.
     */
    private static final float SMALL_SUITE_MAX_RUNTIME = 100;

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the
     * "medium" suite. It is used to make an educated guess at which suite an unlabeled test
     * belongs to.
     */
    private static final float MEDIUM_SUITE_MAX_RUNTIME = 1000;

    /**
     * The following keys are used in the status bundle to provide structured reports to
     * an IInstrumentationWatcher.
     */

    /**
     * This value, if stored with key {@link android.app.Instrumentation#REPORT_KEY_IDENTIFIER},
     * identifies InstrumentationTestRunner as the source of the report.  This is sent with all
     * status messages.
     */
    public static final String REPORT_VALUE_ID = "InstrumentationTestRunner";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the total number of tests that are being run.  This is sent with all status
     * messages.
     */
    public static final String REPORT_KEY_NUM_TOTAL = "numtests";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the sequence number of the current test.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NUM_CURRENT = "current";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test class.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_CLASS = "class";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_TEST = "test";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the run time in milliseconds of the current test.
     */
    private static final String REPORT_KEY_RUN_TIME = "runtime";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the guessed suite assignment for the current test.
     */
    private static final String REPORT_KEY_SUITE_ASSIGNMENT = "suiteassignment";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the path to the generated code coverage file.
     */
    private static final String REPORT_KEY_COVERAGE_PATH = "coverageFilePath";

    /**
     * The test is starting.
     */
    public static final int REPORT_VALUE_RESULT_START = 1;
    /**
     * The test completed successfully.
     */
    public static final int REPORT_VALUE_RESULT_OK = 0;
    /**
     * The test completed with an error.
     */
    public static final int REPORT_VALUE_RESULT_ERROR = -1;
    /**
     * The test completed with a failure.
     */
    public static final int REPORT_VALUE_RESULT_FAILURE = -2;
    /**
     * If included in the status bundle sent to an IInstrumentationWatcher, this key
     * identifies a stack trace describing an error or failure.  This is sent with any status
     * message describing a specific test being completed.
     */
    public static final String REPORT_KEY_STACK = "stack";

    // Default file name for code coverage
    private static final String DEFAULT_COVERAGE_FILE_NAME = "coverage.ec";

    private static final String LOG_TAG = "InstrumentationTestRunner";

    private final Bundle mResults = new Bundle();
    private AndroidTestRunner mTestRunner;
    private boolean mDebug;
    private boolean mJustCount;
    private boolean mSuiteAssignmentMode;
    private int mTestCount;
    private String mPackageOfTests;
    private boolean mCoverage;
    private String mCoverageFilePath;
    private int mDelayMsec;

    @Override
    public void onCreate(Bundle arguments) {
        super.onCreate(arguments);

        // Apk paths used to search for test classes when using TestSuiteBuilders.
        String[] apkPaths =
                {getTargetContext().getPackageCodePath(), getContext().getPackageCodePath()};
        ClassPathPackageInfoSource.setApkPaths(apkPaths);

        Predicate<TestMethod> testSizePredicate = null;
        boolean includePerformance = false;
        String testClassesArg = null;
        boolean logOnly = false;

        if (arguments != null) {
            // Test class name passed as an argument should override any meta-data declaration.
            testClassesArg = arguments.getString(ARGUMENT_TEST_CLASS);
            mDebug = getBooleanArgument(arguments, "debug");
            mJustCount = getBooleanArgument(arguments, "count");
            mSuiteAssignmentMode = getBooleanArgument(arguments, "suiteAssignment");
            mPackageOfTests = arguments.getString(ARGUMENT_TEST_PACKAGE);
            testSizePredicate = getSizePredicateFromArg(
                    arguments.getString(ARGUMENT_TEST_SIZE_PREDICATE));
            includePerformance = getBooleanArgument(arguments, ARGUMENT_INCLUDE_PERF);
            logOnly = getBooleanArgument(arguments, ARGUMENT_LOG_ONLY);
            mCoverage = getBooleanArgument(arguments, "coverage");
            mCoverageFilePath = arguments.getString("coverageFile");

            try {
                Object delay = arguments.get(ARGUMENT_DELAY_MSEC);  // Accept either string or int
                if (delay != null) mDelayMsec = Integer.parseInt(delay.toString());
            } catch (NumberFormatException e) {
                Log.e(LOG_TAG, "Invalid delay_msec parameter", e);
            }
        }

        TestSuiteBuilder testSuiteBuilder = new TestSuiteBuilder(getClass().getName(),
                getTargetContext().getClassLoader());

        if (testSizePredicate != null) {
            testSuiteBuilder.addRequirements(testSizePredicate);
        }
        if (!includePerformance) {
            testSuiteBuilder.addRequirements(REJECT_PERFORMANCE);
        }

        if (testClassesArg == null) {
            if (mPackageOfTests != null) {
                testSuiteBuilder.includePackages(mPackageOfTests);
            } else {
                TestSuite testSuite = getTestSuite();
                if (testSuite != null) {
                    testSuiteBuilder.addTestSuite(testSuite);
                } else {
                    // no package or class bundle arguments were supplied, and no test suite
                    // provided so add all tests in application
                    testSuiteBuilder.includePackages("");
                }
            }
        } else {
            parseTestClasses(testClassesArg, testSuiteBuilder);
        }

        testSuiteBuilder.addRequirements(getBuilderRequirements());

        mTestRunner = getAndroidTestRunner();
        mTestRunner.setContext(getTargetContext());
        mTestRunner.setInstrumentation(this);
        mTestRunner.setSkipExecution(logOnly);
        mTestRunner.setTest(testSuiteBuilder.build());
        mTestCount = mTestRunner.getTestCases().size();
        if (mSuiteAssignmentMode) {
            mTestRunner.addTestListener(new SuiteAssignmentPrinter());
        } else {
            WatcherResultPrinter resultPrinter = new WatcherResultPrinter(mTestCount);
            mTestRunner.addTestListener(new TestPrinter("TestRunner", false));
            mTestRunner.addTestListener(resultPrinter);
            mTestRunner.setPerformanceResultsWriter(resultPrinter);
        }
        start();
    }

    List<Predicate<TestMethod>> getBuilderRequirements() {
        return new ArrayList<Predicate<TestMethod>>();
    }

    /**
     * Parses and loads the specified set of test classes
     *
     * @param testClassArg - comma-separated list of test classes and methods
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClasses(String testClassArg, TestSuiteBuilder testSuiteBuilder) {
        String[] testClasses = testClassArg.split(",");
        for (String testClass : testClasses) {
            parseTestClass(testClass, testSuiteBuilder);
        }
    }

    /**
     * Parse and load the given test class and, optionally, method
     *
     * @param testClassName - full package name of test class and optionally method to add.
     *        Expected format: com.android.TestClass#testMethod
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClass(String testClassName, TestSuiteBuilder testSuiteBuilder) {
        int methodSeparatorIndex = testClassName.indexOf('#');
        String testMethodName = null;

        if (methodSeparatorIndex > 0) {
            testMethodName = testClassName.substring(methodSeparatorIndex + 1);
            testClassName = testClassName.substring(0, methodSeparatorIndex);
        }
        testSuiteBuilder.addTestClassByName(testClassName, testMethodName, getTargetContext());
    }

    protected AndroidTestRunner getAndroidTestRunner() {
        return new AndroidTestRunner();
    }

    private boolean getBooleanArgument(Bundle arguments, String tag) {
        String tagString = arguments.getString(tag);
        return tagString != null && Boolean.parseBoolean(tagString);
    }

    /*
     * Returns the size predicate object, corresponding to the "size" argument value.
     */
    private Predicate<TestMethod> getSizePredicateFromArg(String sizeArg) {

        if (SMALL_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_SMALL;
        } else if (MEDIUM_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_MEDIUM;
        } else if (LARGE_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_LARGE;
        } else {
            return null;
        }
    }

    @Override
    public void onStart() {
        Looper.prepare();

        if (mJustCount) {
            mResults.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResults.putInt(REPORT_KEY_NUM_TOTAL, mTestCount);
            finish(Activity.RESULT_OK, mResults);
        } else {
            if (mDebug) {
                Debug.waitForDebugger();
            }

            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            PrintStream writer = new PrintStream(byteArrayOutputStream);
            try {
                StringResultPrinter resultPrinter = new StringResultPrinter(writer);

                mTestRunner.addTestListener(resultPrinter);

                long startTime = System.currentTimeMillis();
                mTestRunner.runTest();
                long runTime = System.currentTimeMillis() - startTime;

                resultPrinter.print(mTestRunner.getTestResult(), runTime);
            } finally {
                mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\nTest results for %s=%s",
                        mTestRunner.getTestClassName(),
                        byteArrayOutputStream.toString()));

                if (mCoverage) {
                    generateCoverageReport();
                }
                writer.close();

                finish(Activity.RESULT_OK, mResults);
            }
        }
    }

    public TestSuite getTestSuite() {
        return getAllTests();
    }

    /**
     * Override this to define all of the tests to run in your package.
     */
    public TestSuite getAllTests() {
        return null;
    }

    /**
     * Override this to provide access to the class loader of your package.
     */
    public ClassLoader getLoader() {
        return null;
    }

    private void generateCoverageReport() {
        // use reflection to call emma dump coverage method, to avoid
        // always statically compiling against emma jar
        String coverageFilePath = getCoverageFilePath();
        File coverageFile = new File(coverageFilePath);
        try {
            Class<?> emmaRTClass = Class.forName("com.vladium.emma.rt.RT");
            Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData",
                    coverageFile.getClass(), boolean.class, boolean.class);

            dumpCoverageMethod.invoke(null, coverageFile, false, false);
            // output path to generated coverage file so it can be parsed by a test harness if
            // needed
            mResults.putString(REPORT_KEY_COVERAGE_PATH, coverageFilePath);
            // also output a more user friendly msg
            mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("Generated code coverage data to %s", coverageFilePath));
        } catch (ClassNotFoundException e) {
            reportEmmaError("Is emma jar on classpath?", e);
        } catch (SecurityException e) {
            reportEmmaError(e);
        } catch (NoSuchMethodException e) {
            reportEmmaError(e);
        } catch (IllegalArgumentException e) {
            reportEmmaError(e);
        } catch (IllegalAccessException e) {
            reportEmmaError(e);
        } catch (InvocationTargetException e) {
            reportEmmaError(e);
        }
    }

    private String getCoverageFilePath() {
        if (mCoverageFilePath == null) {
            return getTargetContext().getFilesDir().getAbsolutePath() + File.separator +
                   DEFAULT_COVERAGE_FILE_NAME;
        } else {
            return mCoverageFilePath;
        }
    }

    private void reportEmmaError(Exception e) {
        reportEmmaError("", e);
    }

    private void reportEmmaError(String hint, Exception e) {
        String msg = "Failed to generate emma coverage. " + hint;
        Log.e(LOG_TAG, msg, e);
        mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "\nError: " + msg);
    }

    // TODO kill this, use status() and prettyprint model for better output
    private class StringResultPrinter extends ResultPrinter {

        public StringResultPrinter(PrintStream writer) {
            super(writer);
        }

        synchronized void print(TestResult result, long runTime) {
            printHeader(runTime);
            printFooter(result);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher about
     * which suite each test belongs to.
     */
    private class SuiteAssignmentPrinter implements TestListener {

        private Bundle mTestResult;
        private long mStartTime;
        private long mEndTime;
        private boolean mTimingValid;

        public SuiteAssignmentPrinter() {
        }

        /**
         * Send a status for the start of each test, so long tests can be seen as "running".
         */
        public void startTest(Test test) {
            mTimingValid = true;
            mStartTime = System.currentTimeMillis();
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            float runTime;
            String assignmentSuite;
            mEndTime = System.currentTimeMillis();
            mTestResult = new Bundle();

            if (!mTimingValid || mStartTime < 0) {
                assignmentSuite = "NA";
                runTime = -1;
            } else {
                runTime = mEndTime - mStartTime;
                if (runTime < SMALL_SUITE_MAX_RUNTIME
                        && !InstrumentationTestCase.class.isAssignableFrom(test.getClass())) {
                    assignmentSuite = SMALL_SUITE;
                } else if (runTime < MEDIUM_SUITE_MAX_RUNTIME) {
                    assignmentSuite = MEDIUM_SUITE;
                } else {
                    assignmentSuite = LARGE_SUITE;
                }
            }
            // Clear mStartTime so that we can verify that it gets set next time.
            mStartTime = -1;

            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    test.getClass().getName() + "#" + ((TestCase) test).getName()
                    + "\nin " + assignmentSuite + " suite\nrunTime: "
                    + String.valueOf(runTime) + "\n");
            mTestResult.putFloat(REPORT_KEY_RUN_TIME, runTime);
            mTestResult.putString(REPORT_KEY_SUITE_ASSIGNMENT, assignmentSuite);

            sendStatus(0, mTestResult);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher.
     */
    private class WatcherResultPrinter implements TestListener, PerformanceResultsWriter {
        private final Bundle mResultTemplate;
        Bundle mTestResult;
        int mTestNum = 0;
        int mTestResultCode = 0;
        String mTestClass = null;
        PerformanceCollector mPerfCollector = new PerformanceCollector();
        boolean mIsTimedTest = false;
        boolean mIncludeDetailedStats = false;

        public WatcherResultPrinter(int numTests) {
            mResultTemplate = new Bundle();
            mResultTemplate.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResultTemplate.putInt(REPORT_KEY_NUM_TOTAL, numTests);
        }

        /**
         * Send a status for the start of each test, so long tests can be seen
         * as "running".
         */
        public void startTest(Test test) {
            String testClass = test.getClass().getName();
            String testName = ((TestCase)test).getName();
            mTestResult = new Bundle(mResultTemplate);
            mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass);
            mTestResult.putString(REPORT_KEY_NAME_TEST, testName);
            mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum);
            // pretty printing
            if (testClass != null && !testClass.equals(mTestClass)) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\n%s:", testClass));
                mTestClass = testClass;
            } else {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "");
            }

            // The delay_msec parameter is normally used to provide buffers of idle time
            // for power measurement purposes. To make sure there is a delay before and after
            // every test in a suite, we delay *after* every test (see endTest below) and also
            // delay *before* the first test. So, delay test1 delay test2 delay.

            try {
                if (mTestNum == 1) Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }

            sendStatus(REPORT_VALUE_RESULT_START, mTestResult);
            mTestResultCode = 0;

            mIsTimedTest = false;
            mIncludeDetailedStats = false;
            try {
                // Look for TimedTest annotation on both test class and test method
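                // For reference, a timed test might be declared along these lines
                // (hypothetical test method; the annotation may also appear on the class):
                //     @TimedTest(includeDetailedStats = true)
                //     public void testScrollPerformance() { ... }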
                if (test.getClass().getMethod(testName).isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = test.getClass().getMethod(testName).getAnnotation(
                            TimedTest.class).includeDetailedStats();
                } else if (test.getClass().isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = test.getClass().getAnnotation(
                            TimedTest.class).includeDetailedStats();
                }
            } catch (SecurityException e) {
                throw new IllegalStateException(e);
            } catch (NoSuchMethodException e) {
                throw new IllegalStateException(e);
            }

            if (mIsTimedTest && mIncludeDetailedStats) {
                mPerfCollector.beginSnapshot("");
            } else if (mIsTimedTest) {
                mPerfCollector.startTiming("");
            }
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_ERROR;
            // pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("\nError in %s:\n%s",
                    ((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_FAILURE;
            // pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("\nFailure in %s:\n%s",
                    ((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            if (mIsTimedTest && mIncludeDetailedStats) {
                mTestResult.putAll(mPerfCollector.endSnapshot());
            } else if (mIsTimedTest) {
                writeStopTiming(mPerfCollector.stopTiming(""));
            }

            if (mTestResultCode == 0) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ".");
            }
            sendStatus(mTestResultCode, mTestResult);

            try { // Sleep after every test, if specified
                Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }
        }

        public void writeBeginSnapshot(String label) {
            // Do nothing
        }

        public void writeEndSnapshot(Bundle results) {
            // Copy all snapshot data fields into mResults, which is reported
            // via Instrumentation.finish
            mResults.putAll(results);
        }

        public void writeStartTiming(String label) {
            // Do nothing
        }

        public void writeStopTiming(Bundle results) {
            // Copy results into mTestResult by flattening the list of iterations,
            // which is reported via WatcherResultPrinter.endTest
            int i = 0;
            for (Parcelable p :
                    results.getParcelableArrayList(PerformanceCollector.METRIC_KEY_ITERATIONS)) {
                Bundle iteration = (Bundle)p;
                String index = "iteration" + i + ".";
                mTestResult.putString(index + PerformanceCollector.METRIC_KEY_LABEL,
                        iteration.getString(PerformanceCollector.METRIC_KEY_LABEL));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_CPU_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_CPU_TIME));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_EXECUTION_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_EXECUTION_TIME));
                i++;
            }
        }

        public void writeMeasurement(String label, long value) {
            mTestResult.putLong(label, value);
        }

        public void writeMeasurement(String label, float value) {
            mTestResult.putFloat(label, value);
        }

        public void writeMeasurement(String label, String value) {
            mTestResult.putString(label, value);
        }

        // TODO report the end of the cycle
    }
}