1 /* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package com.android.tradefed.testtype; 18 19 import com.android.ddmlib.FileListingService; 20 import com.android.ddmlib.Log; 21 import com.android.tradefed.config.Option; 22 import com.android.tradefed.config.OptionClass; 23 import com.android.tradefed.device.DeviceNotAvailableException; 24 import com.android.tradefed.device.IFileEntry; 25 import com.android.tradefed.device.ITestDevice; 26 import com.android.tradefed.result.ITestInvocationListener; 27 import com.android.tradefed.util.proto.TfMetricProtoUtil; 28 29 import com.google.common.annotations.VisibleForTesting; 30 31 import java.util.ArrayList; 32 import java.util.Collection; 33 import java.util.HashMap; 34 import java.util.Map; 35 import java.util.concurrent.TimeUnit; 36 37 /** 38 * A Test that runs a native benchmark test executable on given device. 39 * <p/> 40 * It uses {@link NativeBenchmarkTestParser} to parse out the average operation time vs delay 41 * between operations those results to the {@link ITestInvocationListener}s. 42 */ 43 @OptionClass(alias = "native-benchmark") 44 public class NativeBenchmarkTest implements IDeviceTest, IRemoteTest { 45 46 private static final String LOG_TAG = "NativeStressTest"; 47 static final String DEFAULT_TEST_PATH = "data/nativebenchmark"; 48 49 // The metrics key names to report to listeners 50 static final String AVG_OP_TIME_KEY_PREFIX = "avg-operation-time"; 51 static final String ITERATION_KEY = "iterations"; 52 53 private ITestDevice mDevice = null; 54 55 @Option(name = "native-benchmark-device-path", 56 description="The path on the device where native stress tests are located.") 57 private String mDeviceTestPath = DEFAULT_TEST_PATH; 58 59 @Option(name = "benchmark-module-name", 60 description="The name of the native benchmark test module to run. " + 61 "If not specified all tests in --native-benchmark-device-path will be run.") 62 private String mTestModule = null; 63 64 @Option(name = "benchmark-run-name", 65 description="Optional name to pass to test reporters. If unspecified, will use" + 66 "--benchmark-module-name.") 67 private String mReportRunName = null; 68 69 @Option(name = "iterations", 70 description="The number of benchmark test iterations per run.") 71 private int mNumIterations = 1000; 72 73 @Option(name = "delay-per-run", 74 description="The delay between each benchmark iteration, in micro seconds." + 75 "Multiple values may be given to specify multiple runs with different delay values.") 76 // TODO: change units to seconds for consistency with native benchmark module input 77 private Collection<Integer> mDelays = new ArrayList<Integer>(); 78 79 @Option(name = "max-run-time", description = 80 "The maximum time to allow for one benchmark run in ms.") 81 private int mMaxRunTime = 5 * 60 * 1000; 82 83 @Option(name = "server-cpu", 84 description="Optionally specify a server cpu.") 85 private int mServerCpu = 1; 86 87 @Option(name = "client-cpu", 88 description="Optionally specify a client cpu.") 89 private int mClientCpu = 1; 90 91 @Option(name = "max-cpu-freq", 92 description="Flag to force device cpu to run at maximum frequency.") 93 private boolean mMaxCpuFreq = false; 94 95 96 // TODO: consider sharing code with {@link GTest} and {@link NativeStressTest} 97 98 /** 99 * {@inheritDoc} 100 */ 101 @Override setDevice(ITestDevice device)102 public void setDevice(ITestDevice device) { 103 mDevice = device; 104 } 105 106 /** 107 * {@inheritDoc} 108 */ 109 @Override getDevice()110 public ITestDevice getDevice() { 111 return mDevice; 112 } 113 114 /** 115 * Set the Android native benchmark test module to run. 116 * 117 * @param moduleName The name of the native test module to run 118 */ setModuleName(String moduleName)119 public void setModuleName(String moduleName) { 120 mTestModule = moduleName; 121 } 122 123 /** 124 * Get the Android native benchmark test module to run. 125 * 126 * @return the name of the native test module to run, or null if not set 127 */ getModuleName()128 public String getModuleName() { 129 return mTestModule; 130 } 131 132 /** 133 * Set the number of iterations to execute per run 134 */ setNumIterations(int iterations)135 void setNumIterations(int iterations) { 136 mNumIterations = iterations; 137 } 138 139 /** 140 * Set the delay values per run 141 */ addDelaysPerRun(Collection<Integer> delays)142 void addDelaysPerRun(Collection<Integer> delays) { 143 mDelays.addAll(delays); 144 } 145 146 /** 147 * Gets the path where native benchmark tests live on the device. 148 * 149 * @return The path on the device where the native tests live. 150 */ 151 @VisibleForTesting getTestPath()152 String getTestPath() { 153 StringBuilder testPath = new StringBuilder(mDeviceTestPath); 154 if (mTestModule != null) { 155 testPath.append(FileListingService.FILE_SEPARATOR); 156 testPath.append(mTestModule); 157 } 158 return testPath.toString(); 159 } 160 161 /** 162 * Executes all native benchmark tests in a folder as well as in all subfolders recursively. 163 * 164 * @param rootEntry The root folder to begin searching for native tests 165 * @param testDevice The device to run tests on 166 * @param listener the run listener 167 * @throws DeviceNotAvailableException 168 */ 169 @VisibleForTesting doRunAllTestsInSubdirectory( IFileEntry rootEntry, ITestDevice testDevice, ITestInvocationListener listener)170 void doRunAllTestsInSubdirectory( 171 IFileEntry rootEntry, ITestDevice testDevice, ITestInvocationListener listener) 172 throws DeviceNotAvailableException { 173 174 if (rootEntry.isDirectory()) { 175 // recursively run tests in all subdirectories 176 for (IFileEntry childEntry : rootEntry.getChildren(true)) { 177 doRunAllTestsInSubdirectory(childEntry, testDevice, listener); 178 } 179 } else { 180 // assume every file is a valid benchmark test binary. 181 // use name of file as run name 182 String runName = (mReportRunName == null ? rootEntry.getName() : mReportRunName); 183 String fullPath = rootEntry.getFullEscapedPath(); 184 if (mDelays.size() == 0) { 185 // default to one run with no delay 186 mDelays.add(0); 187 } 188 189 // force file to be executable 190 testDevice.executeShellCommand(String.format("chmod 755 %s", fullPath)); 191 long startTime = System.currentTimeMillis(); 192 193 listener.testRunStarted(runName, 0); 194 Map<String, String> metricMap = new HashMap<String, String>(); 195 metricMap.put(ITERATION_KEY, Integer.toString(mNumIterations)); 196 try { 197 for (Integer delay : mDelays) { 198 NativeBenchmarkTestParser resultParser = createResultParser(runName); 199 // convert delay to seconds 200 double delayFloat = ((double)delay)/1000000; 201 Log.i(LOG_TAG, String.format("Running %s for %d iterations with delay %f", 202 rootEntry.getName(), mNumIterations, delayFloat)); 203 String cmd = String.format("%s -n %d -d %f -c %d -s %d", fullPath, 204 mNumIterations, delayFloat, mClientCpu, mServerCpu); 205 Log.i(LOG_TAG, String.format("Running native benchmark test on %s: %s", 206 mDevice.getSerialNumber(), cmd)); 207 testDevice.executeShellCommand(cmd, resultParser, 208 mMaxRunTime, TimeUnit.MILLISECONDS, 0); 209 addMetric(metricMap, resultParser, delay); 210 } 211 // TODO: is catching exceptions, and reporting testRunFailed necessary? 212 } finally { 213 final long elapsedTime = System.currentTimeMillis() - startTime; 214 listener.testRunEnded(elapsedTime, TfMetricProtoUtil.upgradeConvert(metricMap)); 215 } 216 } 217 } 218 219 /** 220 * Adds the operation time metric for a run with given delay 221 * 222 * @param metricMap 223 * @param resultParser 224 * @param delay 225 */ addMetric(Map<String, String> metricMap, NativeBenchmarkTestParser resultParser, Integer delay)226 private void addMetric(Map<String, String> metricMap, NativeBenchmarkTestParser resultParser, 227 Integer delay) { 228 String metricKey = String.format("%s-delay%d", AVG_OP_TIME_KEY_PREFIX, delay); 229 // temporarily convert seconds to microseconds, as some reporters cannot handle small values 230 metricMap.put(metricKey, Double.toString(resultParser.getAvgOperationTime()*1000000)); 231 } 232 233 /** 234 * Factory method for creating a {@link NativeBenchmarkTestParser} that parses test output 235 * <p/> 236 * Exposed so unit tests can mock. 237 * 238 * @param runName 239 * @return a {@link NativeBenchmarkTestParser} 240 */ createResultParser(String runName)241 NativeBenchmarkTestParser createResultParser(String runName) { 242 return new NativeBenchmarkTestParser(runName); 243 } 244 245 /** 246 * {@inheritDoc} 247 */ 248 @Override run(ITestInvocationListener listener)249 public void run(ITestInvocationListener listener) throws DeviceNotAvailableException { 250 if (mDevice == null) { 251 throw new IllegalArgumentException("Device has not been set"); 252 } 253 254 String testPath = getTestPath(); 255 IFileEntry nativeTestDirectory = mDevice.getFileEntry(testPath); 256 if (nativeTestDirectory == null) { 257 Log.w(LOG_TAG, String.format("Could not find native benchmark test directory %s in %s!", 258 testPath, mDevice.getSerialNumber())); 259 return; 260 } 261 if (mMaxCpuFreq) { 262 mDevice.executeShellCommand( 263 "cat /sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq > " + 264 "/sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq"); 265 } 266 doRunAllTestsInSubdirectory(nativeTestDirectory, mDevice, listener); 267 if (mMaxCpuFreq) { 268 // revert to normal 269 mDevice.executeShellCommand( 270 "cat /sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_min_freq > " + 271 "/sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq"); 272 } 273 274 } 275 } 276